Package pyffi :: Package formats :: Package nif
[hide private]
[frames] | [no frames]

Source Code for Package pyffi.formats.nif

   1  """ 
   2  :mod:`pyffi.formats.nif` --- NetImmerse/Gamebryo (.nif and .kf) 
   3  =============================================================== 
   4   
   5  Implementation 
   6  -------------- 
   7   
   8  .. autoclass:: NifFormat 
   9     :show-inheritance: 
  10     :members: 
  11   
  12  Regression tests 
  13  ---------------- 
  14   
  15  These tests are used to check for functionality and bugs in the library. 
  16  They also provide code examples which you may find useful. 
  17   
  18  Read a NIF file 
  19  ^^^^^^^^^^^^^^^ 
  20   
  21  >>> stream = open('tests/nif/test.nif', 'rb') 
  22  >>> data = NifFormat.Data() 
  23  >>> # inspect is optional; it will not read the actual blocks 
  24  >>> data.inspect(stream) 
  25  >>> hex(data.version) 
  26  '0x14010003' 
  27  >>> data.user_version 
  28  0 
  29  >>> for blocktype in data.header.block_types: 
  30  ...     print(blocktype.decode("ascii")) 
  31  NiNode 
  32  NiTriShape 
  33  NiTriShapeData 
  34  >>> data.roots # blocks have not been read yet, so this is an empty list 
  35  [] 
  36  >>> data.read(stream) 
  37  >>> for root in data.roots: 
  38  ...     for block in root.tree(): 
  39  ...         if isinstance(block, NifFormat.NiNode): 
  40  ...             print(block.name.decode("ascii")) 
  41  test 
  42  >>> stream.close() 
  43   
  44  Parse all NIF files in a directory tree 
  45  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
  46   
  47  >>> for stream, data in NifFormat.walkData('tests/nif'): 
  48  ...     try: 
  49  ...         # the replace call makes the doctest also pass on windows 
  50  ...         print("reading %s" % stream.name.replace("\\\\", "/")) 
  51  ...         data.read(stream) 
  52  ...     except Exception: 
  53  ...         print( 
   54  ...             "Warning: read failed due to corrupt file," 
   55  ...             " corrupt format description, or bug.") # doctest: +REPORT_NDIFF 
   56  reading tests/nif/invalid.nif 
   57  Warning: read failed due to corrupt file, corrupt format description, or bug. 
  58  reading tests/nif/nds.nif 
  59  reading tests/nif/neosteam.nif 
  60  reading tests/nif/test.nif 
  61  reading tests/nif/test_centerradius.nif 
  62  reading tests/nif/test_check_tangentspace1.nif 
  63  reading tests/nif/test_check_tangentspace2.nif 
  64  reading tests/nif/test_check_tangentspace3.nif 
  65  reading tests/nif/test_check_tangentspace4.nif 
  66  reading tests/nif/test_convexverticesshape.nif 
  67  reading tests/nif/test_dump_tex.nif 
  68  reading tests/nif/test_fix_clampmaterialalpha.nif 
  69  reading tests/nif/test_fix_cleanstringpalette.nif 
  70  reading tests/nif/test_fix_detachhavoktristripsdata.nif 
  71  reading tests/nif/test_fix_disableparallax.nif 
  72  reading tests/nif/test_fix_ffvt3rskinpartition.nif 
  73  reading tests/nif/test_fix_mergeskeletonroots.nif 
  74  reading tests/nif/test_fix_tangentspace.nif 
  75  reading tests/nif/test_fix_texturepath.nif 
  76  reading tests/nif/test_grid_128x128.nif 
  77  reading tests/nif/test_grid_64x64.nif 
  78  reading tests/nif/test_mopp.nif 
  79  reading tests/nif/test_opt_collision_complex_mopp.nif 
  80  reading tests/nif/test_opt_collision_mopp.nif 
  81  reading tests/nif/test_opt_collision_packed.nif 
  82  reading tests/nif/test_opt_collision_to_boxshape.nif 
  83  reading tests/nif/test_opt_collision_to_boxshape_notabox.nif 
  84  reading tests/nif/test_opt_collision_unpacked.nif 
  85  reading tests/nif/test_opt_delunusedbones.nif 
  86  reading tests/nif/test_opt_dupgeomdata.nif 
  87  reading tests/nif/test_opt_dupverts.nif 
  88  reading tests/nif/test_opt_emptyproperties.nif 
  89  reading tests/nif/test_opt_grid_layout.nif 
  90  reading tests/nif/test_opt_mergeduplicates.nif 
  91  reading tests/nif/test_opt_vertex_cache.nif 
  92  reading tests/nif/test_opt_zeroscale.nif 
  93  reading tests/nif/test_skincenterradius.nif 
  94  reading tests/nif/test_vertexcolor.nif 
  95   
  96  Create a NIF model from scratch and write to file 
  97  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
  98   
  99  >>> root = NifFormat.NiNode() 
 100  >>> root.name = 'Scene Root' 
 101  >>> blk = NifFormat.NiNode() 
 102  >>> root.add_child(blk) 
 103  >>> blk.name = 'new block' 
 104  >>> blk.scale = 2.4 
 105  >>> blk.translation.x = 3.9 
 106  >>> blk.rotation.m_11 = 1.0 
 107  >>> blk.rotation.m_22 = 1.0 
 108  >>> blk.rotation.m_33 = 1.0 
 109  >>> ctrl = NifFormat.NiVisController() 
 110  >>> ctrl.flags = 0x000c 
 111  >>> ctrl.target = blk 
 112  >>> blk.add_controller(ctrl) 
 113  >>> blk.add_controller(NifFormat.NiAlphaController()) 
 114  >>> strips = NifFormat.NiTriStrips() 
 115  >>> root.add_child(strips, front = True) 
 116  >>> strips.name = "hello world" 
 117  >>> strips.rotation.m_11 = 1.0 
 118  >>> strips.rotation.m_22 = 1.0 
 119  >>> strips.rotation.m_33 = 1.0 
 120  >>> data = NifFormat.NiTriStripsData() 
 121  >>> strips.data = data 
 122  >>> data.num_vertices = 5 
 123  >>> data.has_vertices = True 
 124  >>> data.vertices.update_size() 
 125  >>> for i, v in enumerate(data.vertices): 
 126  ...     v.x = 1.0+i/10.0 
 127  ...     v.y = 0.2+1.0/(i+1) 
 128  ...     v.z = 0.03 
 129  >>> data.update_center_radius() 
 130  >>> data.num_strips = 2 
 131  >>> data.strip_lengths.update_size() 
 132  >>> data.strip_lengths[0] = 3 
 133  >>> data.strip_lengths[1] = 4 
 134  >>> data.has_points = True 
 135  >>> data.points.update_size() 
 136  >>> data.points[0][0] = 0 
 137  >>> data.points[0][1] = 1 
 138  >>> data.points[0][2] = 2 
 139  >>> data.points[1][0] = 1 
 140  >>> data.points[1][1] = 2 
 141  >>> data.points[1][2] = 3 
 142  >>> data.points[1][3] = 4 
 143  >>> data.num_uv_sets = 1 
 144  >>> data.has_uv = True 
 145  >>> data.uv_sets.update_size() 
 146  >>> for i, v in enumerate(data.uv_sets[0]): 
 147  ...     v.u = 1.0-i/10.0 
 148  ...     v.v = 1.0/(i+1) 
 149  >>> data.has_normals = True 
 150  >>> data.normals.update_size() 
 151  >>> for i, v in enumerate(data.normals): 
 152  ...     v.x = 0.0 
 153  ...     v.y = 0.0 
 154  ...     v.z = 1.0 
 155  >>> strips.update_tangent_space() 
 156  >>> from tempfile import TemporaryFile 
 157  >>> stream = TemporaryFile() 
 158  >>> nifdata = NifFormat.Data(version=0x14010003, user_version=10) 
 159  >>> nifdata.roots = [root] 
 160  >>> nifdata.write(stream) 
 161  >>> stream.close() 
 162   
 163  Get list of versions and games 
 164  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
 165   
 166  >>> for vnum in sorted(NifFormat.versions.values()): 
 167  ...     print('0x%08X' % vnum) # doctest: +REPORT_UDIFF 
 168  0x02030000 
 169  0x03000000 
 170  0x03000300 
 171  0x03010000 
 172  0x0303000D 
 173  0x04000000 
 174  0x04000002 
 175  0x0401000C 
 176  0x04020002 
 177  0x04020100 
 178  0x04020200 
 179  0x0A000100 
 180  0x0A000102 
 181  0x0A000103 
 182  0x0A010000 
 183  0x0A010065 
 184  0x0A01006A 
 185  0x0A020000 
 186  0x0A020001 
 187  0x0A040001 
 188  0x14000004 
 189  0x14000005 
 190  0x14010003 
 191  0x14020007 
 192  0x14020008 
 193  0x14030001 
 194  0x14030002 
 195  0x14030003 
 196  0x14030006 
 197  0x14030009 
 198  0x14050000 
 199  0x14060000 
 200  0x14060500 
 201  0x1E000002 
 202  >>> for game, versions in sorted(NifFormat.games.items(), key=lambda x: x[0]): 
 203  ...     print("%s " % game + " ".join('0x%08X' % vnum for vnum in versions)) # doctest: +REPORT_UDIFF 
 204  ? 0x0A000103 
 205  Atlantica 0x14020008 
 206  Axis and Allies 0x0A010000 
 207  Bully SE 0x14030009 
 208  Civilization IV 0x04020002 0x04020100 0x04020200 0x0A000100 0x0A010000 \ 
 209  0x0A020000 0x14000004 
 210  Culpa Innata 0x04020200 
 211  Dark Age of Camelot 0x02030000 0x03000300 0x03010000 0x0401000C 0x04020100 \ 
 212  0x04020200 0x0A010000 
 213  Divinity 2 0x14030009 
 214  Emerge 0x14020007 0x14020008 0x14030001 0x14030002 0x14030003 0x14030006 \ 
 215  0x1E000002 
 216  Empire Earth II 0x04020200 0x0A010000 
 217  Empire Earth III 0x14020007 0x14020008 
 218  Entropia Universe 0x0A010000 
 219  Epic Mickey 0x14060500 
 220  Fallout 3 0x14020007 
 221  Freedom Force 0x04000000 0x04000002 
 222  Freedom Force vs. the 3rd Reich 0x0A010000 
 223  Howling Sword 0x14030009 
 224  Kohan 2 0x0A010000 
 225  KrazyRain 0x14050000 0x14060000 
 226  Lazeska 0x14030009 
 227  Loki 0x0A020000 
 228  Megami Tensei: Imagine 0x14010003 
 229  Morrowind 0x04000002 
 230  NeoSteam 0x0A010000 
 231  Oblivion 0x0303000D 0x0A000100 0x0A000102 0x0A010065 0x0A01006A 0x0A020000 0x14000004 \ 
 232  0x14000005 
 233  Prison Tycoon 0x0A020000 
 234  Pro Cycling Manager 0x0A020000 
 235  Red Ocean 0x0A020000 
 236  Sid Meier's Railroads 0x14000004 
 237  Star Trek: Bridge Commander 0x03000000 0x03010000 
 238  The Guild 2 0x0A010000 
 239  Warhammer 0x14030009 
 240  Wildlife Park 2 0x0A010000 0x0A020000 
 241  Worldshift 0x0A020001 0x0A040001 
 242  Zoo Tycoon 2 0x0A000100 
 243   
 244  Reading an unsupported nif file 
 245  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
 246   
 247  >>> stream = open('tests/nif/invalid.nif', 'rb') 
 248  >>> data = NifFormat.Data() 
 249  >>> data.inspect(stream) # the file seems ok on inspection 
 250  >>> data.read(stream) # doctest: +ELLIPSIS 
 251  Traceback (most recent call last): 
 252      ... 
 253  ValueError: ... 
 254  >>> stream.close() 
 255   
 256  Template types 
 257  ^^^^^^^^^^^^^^ 
 258   
 259  >>> block = NifFormat.NiTextKeyExtraData() 
 260  >>> block.num_text_keys = 1 
 261  >>> block.text_keys.update_size() 
 262  >>> block.text_keys[0].time = 1.0 
 263  >>> block.text_keys[0].value = 'hi' 
 264   
 265  Links 
 266  ^^^^^ 
 267   
 268  >>> NifFormat.NiNode._has_links 
 269  True 
 270  >>> NifFormat.NiBone._has_links 
 271  True 
 272  >>> skelroot = NifFormat.NiNode() 
 273  >>> geom = NifFormat.NiTriShape() 
 274  >>> geom.skin_instance = NifFormat.NiSkinInstance() 
 275  >>> geom.skin_instance.skeleton_root = skelroot 
 276  >>> [block.__class__.__name__ for block in geom.get_refs()] 
 277  ['NiSkinInstance'] 
 278  >>> [block.__class__.__name__ for block in geom.get_links()] 
 279  ['NiSkinInstance'] 
 280  >>> [block.__class__.__name__ for block in geom.skin_instance.get_refs()] 
 281  [] 
 282  >>> [block.__class__.__name__ for block in geom.skin_instance.get_links()] 
 283  ['NiNode'] 
 284   
 285  Strings 
 286  ^^^^^^^ 
 287   
 288  >>> extra = NifFormat.NiTextKeyExtraData() 
 289  >>> extra.num_text_keys = 2 
 290  >>> extra.text_keys.update_size() 
 291  >>> extra.text_keys[0].time = 0.0 
 292  >>> extra.text_keys[0].value = "start" 
 293  >>> extra.text_keys[1].time = 2.0 
 294  >>> extra.text_keys[1].value = "end" 
 295  >>> for extrastr in extra.get_strings(None): 
 296  ...     print(extrastr.decode("ascii")) 
 297  start 
 298  end 
 299  """ 
 300   
 301  # ***** BEGIN LICENSE BLOCK ***** 
 302  # 
 303  # Copyright (c) 2007-2011, NIF File Format Library and Tools. 
 304  # All rights reserved. 
 305  # 
 306  # Redistribution and use in source and binary forms, with or without 
 307  # modification, are permitted provided that the following conditions 
 308  # are met: 
 309  # 
 310  #    * Redistributions of source code must retain the above copyright 
 311  #      notice, this list of conditions and the following disclaimer. 
 312  # 
 313  #    * Redistributions in binary form must reproduce the above 
 314  #      copyright notice, this list of conditions and the following 
 315  #      disclaimer in the documentation and/or other materials provided 
 316  #      with the distribution. 
 317  # 
 318  #    * Neither the name of the NIF File Format Library and Tools 
 319  #      project nor the names of its contributors may be used to endorse 
 320  #      or promote products derived from this software without specific 
 321  #      prior written permission. 
 322  # 
 323  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
 324  # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
 325  # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
 326  # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
 327  # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
 328  # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
 329  # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
 330  # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
 331  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
 332  # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
 333  # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
 334  # POSSIBILITY OF SUCH DAMAGE. 
 335  # 
 336  # ***** END LICENSE BLOCK ***** 
 337   
 338  from itertools import izip, repeat, chain 
 339  import logging 
 340  import math # math.pi 
 341  import os 
 342  import re 
 343  import struct 
 344  import sys 
 345  import warnings 
 346  import weakref 
 347   
 348  import pyffi.formats.bsa 
 349  import pyffi.formats.dds 
 350  import pyffi.object_models.common 
 351  import pyffi.object_models 
 352  from pyffi.object_models.xml import FileFormat 
 353  import pyffi.utils.inertia 
 354  from pyffi.utils.mathutils import * # XXX todo get rid of from XXX import * 
 355  import pyffi.utils.mopp 
 356  import pyffi.utils.tristrip 
 357  import pyffi.utils.vertex_cache 
 358  import pyffi.utils.quickhull 
 359  # XXX convert the following to absolute imports 
 360  from pyffi.object_models.editable import EditableBoolComboBox 
 361  from pyffi.utils.graph import EdgeFilter 
 362  from pyffi.object_models.xml.basic import BasicBase 
 363  from pyffi.object_models.xml.struct_ import StructBase 
364 365 366 367 -class NifFormat(FileFormat):
368 """This class contains the generated classes from the xml.""" 369 xml_file_name = 'nif.xml' 370 # where to look for nif.xml and in what order: NIFXMLPATH env var, 371 # or NifFormat module directory 372 xml_file_path = [os.getenv('NIFXMLPATH'), 373 os.path.join(os.path.dirname(__file__), "nifxml")] 374 # filter for recognizing nif files by extension 375 # .kf are nif files containing keyframes 376 # .kfa are nif files containing keyframes in DAoC style 377 # .nifcache are Empire Earth II nif files 378 # .texcache are Empire Earth II/III packed texture nif files 379 # .pcpatch are Empire Earth II/III packed texture nif files 380 # .item are Divinity 2 nif files 381 # .nft are Bully SE nif files (containing textures) 382 # .nif_wii are Epic Mickey nif files 383 RE_FILENAME = re.compile(r'^.*\.(nif|kf|kfa|nifcache|jmi|texcache|pcpatch|nft|item|nif_wii)$', re.IGNORECASE) 384 # archives 385 ARCHIVE_CLASSES = [pyffi.formats.bsa.BsaFormat] 386 # used for comparing floats 387 EPSILON = 0.0001 388 389 # basic types 390 ulittle32 = pyffi.object_models.common.ULittle32 391 int = pyffi.object_models.common.Int 392 uint = pyffi.object_models.common.UInt 393 byte = pyffi.object_models.common.UByte # not a typo 394 char = pyffi.object_models.common.Char 395 short = pyffi.object_models.common.Short 396 ushort = pyffi.object_models.common.UShort 397 float = pyffi.object_models.common.Float 398 BlockTypeIndex = pyffi.object_models.common.UShort 399 StringIndex = pyffi.object_models.common.UInt 400 SizedString = pyffi.object_models.common.SizedString 401 402 # implementation of nif-specific basic types 403
class StringOffset(pyffi.object_models.common.Int):
    """An integer whose default value is -1 instead of 0.

    The value -1 marks an unset string offset."""

    def __init__(self, **kwargs):
        # Initialize as a plain Int, then override the default with -1.
        pyffi.object_models.common.Int.__init__(self, **kwargs)
        self.set_value(-1)
409
class bool(BasicBase, EditableBoolComboBox):
    """Basic implementation of a 32-bit (8-bit for versions > 4.0.0.2)
    boolean type.

    >>> i = NifFormat.bool()
    >>> i.set_value('false')
    >>> i.get_value()
    False
    >>> i.set_value('true')
    >>> i.get_value()
    True
    """
    def __init__(self, **kwargs):
        # Default value is False.
        BasicBase.__init__(self, **kwargs)
        self.set_value(False)

    def get_value(self):
        # Return the stored Python bool.
        return self._value

    def set_value(self, value):
        # Strings are interpreted leniently: 'false' (any case) and '0'
        # mean False; any other value falls through to plain truthiness.
        if isinstance(value, basestring):
            if value.lower() == 'false':
                self._value = False
                return
            elif value == '0':
                self._value = False
                return
        if value:
            self._value = True
        else:
            self._value = False

    def get_size(self, data=None):
        # Stored as one byte from version 4.0.0.3 onwards, and as a
        # 32-bit integer in older files; -1 (no data) takes the old path.
        ver = data.version if data else -1
        if ver > 0x04000002:
            return 1
        else:
            return 4

    def get_hash(self, data=None):
        return self._value

    def read(self, stream, data):
        # Width depends on file version; mirrors get_size.
        if data.version > 0x04000002:
            value, = struct.unpack(data._byte_order + 'B',
                                   stream.read(1))
        else:
            value, = struct.unpack(data._byte_order + 'I',
                                   stream.read(4))
        # note: this calls the *builtin* bool, not this class (method
        # bodies do not search the enclosing class scope)
        self._value = bool(value)

    def write(self, stream, data):
        # Mirror of read: one byte for new versions, four for old ones.
        if data.version > 0x04000002:
            stream.write(struct.pack(data._byte_order + 'B',
                                     int(self._value)))
        else:
            stream.write(struct.pack(data._byte_order + 'I',
                                     int(self._value)))
468
class Flags(pyffi.object_models.common.UShort):
    """An unsigned short whose string form is hexadecimal, which is the
    natural way to display a bit-flag field."""

    def __str__(self):
        flags = self.get_value()
        return hex(flags)
472
473 - class Ref(BasicBase):
474 """Reference to another block.""" 475 _is_template = True 476 _has_links = True 477 _has_refs = True
478 - def __init__(self, **kwargs):
479 BasicBase.__init__(self, **kwargs) 480 self._template = kwargs.get("template") 481 self.set_value(None)
482
483 - def get_value(self):
484 return self._value
485
486 - def set_value(self, value):
487 if value is None: 488 self._value = None 489 else: 490 if not isinstance(value, self._template): 491 raise TypeError( 492 'expected an instance of %s but got instance of %s' 493 % (self._template, value.__class__)) 494 self._value = value
495
496 - def get_size(self, data=None):
497 return 4
498
499 - def get_hash(self, data=None):
500 if self.get_value(): 501 return self.get_value().get_hash(data) 502 else: 503 return None
504
505 - def read(self, stream, data):
506 self.set_value(None) # fix_links will set this field 507 block_index, = struct.unpack(data._byte_order + 'i', 508 stream.read(4)) 509 data._link_stack.append(block_index)
510
511 - def write(self, stream, data):
512 """Write block reference.""" 513 if self.get_value() is None: 514 # -1: link by number, 0: link by pointer 515 block_index = -1 if data.version >= 0x0303000D else 0 516 else: 517 try: 518 block_index = data._block_index_dct[self.get_value()] 519 except KeyError: 520 logging.getLogger("pyffi.nif.ref").warn( 521 "%s block is missing from the nif tree:" 522 " omitting reference" 523 % self.get_value().__class__.__name__) 524 # -1: link by number, 0: link by pointer 525 block_index = -1 if data.version >= 0x0303000D else 0 526 stream.write(struct.pack( 527 data._byte_order + 'i', block_index))
528 550 557
558 - def get_refs(self, data=None):
559 val = self.get_value() 560 if val is not None: 561 return [val] 562 else: 563 return []
564
565 - def replace_global_node(self, oldbranch, newbranch, 566 edge_filter=EdgeFilter()):
567 """ 568 >>> from pyffi.formats.nif import NifFormat 569 >>> x = NifFormat.NiNode() 570 >>> y = NifFormat.NiNode() 571 >>> z = NifFormat.NiNode() 572 >>> x.add_child(y) 573 >>> x.children[0] is y 574 True 575 >>> x.children[0] is z 576 False 577 >>> x.replace_global_node(y, z) 578 >>> x.children[0] is y 579 False 580 >>> x.children[0] is z 581 True 582 >>> x.replace_global_node(z, None) 583 >>> x.children[0] is None 584 True 585 """ 586 if self.get_value() is oldbranch: 587 # set_value takes care of template type 588 self.set_value(newbranch) 589 #print("replacing", repr(oldbranch), "->", repr(newbranch)) 590 if self.get_value() is not None: 591 self.get_value().replace_global_node(oldbranch, newbranch)
592
593 - def get_detail_display(self):
594 # return the node itself, if it is not None 595 if self.get_value() is not None: 596 return self.get_value() 597 else: 598 return "None"
599
class Ptr(Ref):
    """A weak reference to another block, used to point up the hierarchy tree. The reference is not returned by the L{get_refs} function to avoid infinite recursion."""
    _is_template = True
    _has_links = True
    _has_refs = False

    # use weak reference to aid garbage collection

    def get_value(self):
        # Dereference the weakref; returns None when unset or when the
        # target has been garbage collected.
        return self._value() if self._value is not None else None

    def set_value(self, value):
        # Store a weakref to the target (or None) so that an upward
        # pointer never keeps its target alive.
        if value is None:
            self._value = None
        else:
            if not isinstance(value, self._template):
                raise TypeError(
                    'expected an instance of %s but got instance of %s'
                    % (self._template, value.__class__))
            self._value = weakref.ref(value)

    def __str__(self):
        # Avoid infinite recursion: show only the target's class and id.
        # BUGFIX: the old code used self._value.__class__/id(self._value),
        # which described the weakref wrapper, not the referenced block.
        val = self.get_value()
        if val is None:
            return 'None'
        return '%s instance at 0x%08X' % (val.__class__, id(val))

    def get_refs(self, data=None):
        # Upward pointers are deliberately excluded from the ref graph.
        return []

    def get_hash(self, data=None):
        # Pointers do not contribute to hashes (avoids cycles).
        return None

    def replace_global_node(self, oldbranch, newbranch,
                            edge_filter=EdgeFilter()):
        # overridden to avoid infinite recursion; do not descend into
        # the target
        # NOTE(review): EdgeFilter() default is shared across calls —
        # presumably stateless; matches Ref's signature, so kept as is.
        if self.get_value() is oldbranch:
            self.set_value(newbranch)
        #print("replacing", repr(oldbranch), "->", repr(newbranch))
638 - class LineString(BasicBase):
639 """Basic type for strings ending in a newline character (0x0a). 640 641 >>> from tempfile import TemporaryFile 642 >>> f = TemporaryFile() 643 >>> l = NifFormat.LineString() 644 >>> f.write('abcdefg\\x0a'.encode()) 645 >>> f.seek(0) 646 >>> l.read(f) 647 >>> str(l) 648 'abcdefg' 649 >>> f.seek(0) 650 >>> l.set_value('Hi There') 651 >>> l.write(f) 652 >>> f.seek(0) 653 >>> m = NifFormat.LineString() 654 >>> m.read(f) 655 >>> str(m) 656 'Hi There' 657 """
658 - def __init__(self, **kwargs):
659 BasicBase.__init__(self, **kwargs) 660 self.set_value('')
661
662 - def get_value(self):
663 return self._value
664
665 - def set_value(self, value):
666 self._value = pyffi.object_models.common._as_bytes(value).rstrip('\x0a'.encode("ascii"))
667
668 - def __str__(self):
670
671 - def get_size(self, data=None):
672 return len(self._value) + 1 # +1 for trailing endline
673
674 - def get_hash(self, data=None):
675 return self.get_value()
676
677 - def read(self, stream, data=None):
678 self._value = stream.readline().rstrip('\x0a'.encode("ascii"))
679
680 - def write(self, stream, data=None):
681 stream.write(self._value) 682 stream.write("\x0a".encode("ascii"))
683
class HeaderString(BasicBase):
    """The version string at the very start of a nif file: no length
    prefix, terminated by a newline, fully determined by the file
    version and modification."""

    def __str__(self):
        return 'NetImmerse/Gamebryo File Format, Version x.x.x.x'

    def get_detail_display(self):
        return self.__str__()

    def get_hash(self, data=None):
        # Carries no information beyond data.version/modification.
        return None

    def read(self, stream, data):
        """Read and validate the header string against data.version
        and data.modification; raises ValueError on mismatch."""
        version_string = self.version_string(data.version, data.modification)
        s = stream.read(len(version_string))
        if s != version_string.encode("ascii"):
            raise ValueError(
                "invalid NIF header: expected '%s' but got '%s'"
                % (version_string, s))
        # for almost all nifs we have version_string + \x0a
        # but Bully SE has some nifs with version_string + \x0d\x0a
        # see for example World/BBonusB.nft
        eol = stream.read(1)
        if eol == '\x0d'.encode("ascii"):
            eol = stream.read(1)
        if eol != '\x0a'.encode("ascii"):
            raise ValueError(
                "invalid NIF header: bad version string eol")

    def write(self, stream, data):
        stream.write(self.version_string(data.version, data.modification).encode("ascii"))
        stream.write('\x0a'.encode("ascii"))

    def get_size(self, data=None):
        # BUGFIX: pass the modification through so modified headers
        # (e.g. NeoSteam's "NS") report their actual size; previously
        # the standard header string's length was always used, while
        # read/write honour data.modification.
        ver = data.version if data else -1
        modification = data.modification if data else None
        # +1 for the trailing \x0a (the occasional Bully SE \x0d\x0a
        # eol is not accounted for here)
        return len(self.version_string(ver, modification).encode("ascii")) + 1

    @staticmethod
    def version_string(version, modification=None):
        """Transforms version number into a version string.

        >>> NifFormat.HeaderString.version_string(0x03000300)
        'NetImmerse File Format, Version 3.03'
        >>> NifFormat.HeaderString.version_string(0x03010000)
        'NetImmerse File Format, Version 3.1'
        >>> NifFormat.HeaderString.version_string(0x0A000100)
        'NetImmerse File Format, Version 10.0.1.0'
        >>> NifFormat.HeaderString.version_string(0x0A010000)
        'Gamebryo File Format, Version 10.1.0.0'
        >>> NifFormat.HeaderString.version_string(0x0A010000,
        ...     modification="neosteam")
        'NS'
        >>> NifFormat.HeaderString.version_string(0x14020008,
        ...     modification="ndoors")
        'NDSNIF....@....@...., Version 20.2.0.8'
        >>> NifFormat.HeaderString.version_string(0x14030009,
        ...     modification="jmihs1")
        'Joymaster HS1 Object Format - (JMI), Version 20.3.0.9'
        """
        if version == -1 or version is None:
            raise ValueError('No string for version %s.'%version)
        if modification == "neosteam":
            if version != 0x0A010000:
                raise ValueError("NeoSteam must have version 0x0A010000.")
            return "NS"
        elif version <= 0x0A000102:
            s = "NetImmerse"
        else:
            s = "Gamebryo"
        if version == 0x03000300:
            v = "3.03"
        elif version <= 0x03010000:
            v = "%i.%i"%((version >> 24) & 0xff, (version >> 16) & 0xff)
        else:
            v = "%i.%i.%i.%i"%((version >> 24) & 0xff, (version >> 16) & 0xff, (version >> 8) & 0xff, version & 0xff)
        if modification == "ndoors":
            return "NDSNIF....@....@...., Version %s" % v
        elif modification == "jmihs1":
            return "Joymaster HS1 Object Format - (JMI), Version %s" % v
        else:
            return "%s File Format, Version %s" % (s, v)
763
class FileVersion(BasicBase):
    """The raw version integer in the header, always stored little
    endian. Modified nif variants store a fixed magic number here
    instead of the actual version."""

    # fixed magic numbers written by modified nif variants; plain nifs
    # and the jmihs1 variant store the actual file version instead
    _MODIFICATION_MAGIC = {
        "neosteam": ("NeoSteam", 0x08F35232),
        "ndoors": ("Ndoors", 0x73615F67),
        "laxelore": ("Laxe Lore", 0x5A000004),
    }

    def get_value(self):
        raise NotImplementedError

    def set_value(self, value):
        raise NotImplementedError

    def __str__(self):
        return 'x.x.x.x'

    def get_size(self, data=None):
        return 4

    def get_hash(self, data=None):
        return None

    def read(self, stream, data):
        # The version field is little endian regardless of the byte
        # order of the rest of the file.
        ver, = struct.unpack('<I', stream.read(4))
        modification = data.modification
        if (not modification) or modification == "jmihs1":
            if ver != data.version:
                raise ValueError(
                    "Invalid version number: "
                    "expected 0x%08X but got 0x%08X."
                    % (data.version, ver))
            return
        try:
            name, magic = self._MODIFICATION_MAGIC[modification]
        except KeyError:
            raise ValueError(
                "unknown modification: '%s'" % modification)
        if ver != magic:
            raise ValueError(
                "Invalid %s version number: "
                "expected 0x%08X but got 0x%08X."
                % (name, magic, ver))

    def write(self, stream, data):
        # always little endian
        modification = data.modification
        if (not modification) or modification == "jmihs1":
            magic = data.version
        else:
            try:
                magic = self._MODIFICATION_MAGIC[modification][1]
            except KeyError:
                raise ValueError(
                    "unknown modification: '%s'" % modification)
        stream.write(struct.pack('<I', magic))

    def get_detail_display(self):
        return 'x.x.x.x'
829 - class ShortString(BasicBase):
830 """Another type for strings."""
831 - def __init__(self, **kwargs):
832 BasicBase.__init__(self, **kwargs) 833 self._value = ''.encode("ascii")
834
835 - def get_value(self):
836 return self._value
837
838 - def set_value(self, value):
839 val = pyffi.object_models.common._as_bytes(value) 840 if len(val) > 254: 841 raise ValueError('string too long') 842 self._value = val
843
844 - def __str__(self):
846
847 - def get_size(self, data=None):
848 # length byte + string chars + zero byte 849 return len(self._value) + 2
850
851 - def get_hash(self, data=None):
852 return self.get_value()
853
854 - def read(self, stream, data):
855 n, = struct.unpack(data._byte_order + 'B', 856 stream.read(1)) 857 self._value = stream.read(n).rstrip('\x00'.encode("ascii"))
858
859 - def write(self, stream, data):
860 stream.write(struct.pack(data._byte_order + 'B', 861 len(self._value)+1)) 862 stream.write(self._value) 863 stream.write('\x00'.encode("ascii"))
864
class string(SizedString):
    """A nif string: stored inline (length-prefixed) in old files, and
    as a 4-byte index into the header string palette from version
    20.1.0.3 onwards."""
    _has_strings = True

    def get_size(self, data=None):
        ver = data.version if data else -1
        # New style: just the index. Old style: length word + chars.
        return 4 if ver >= 0x14010003 else 4 + len(self._value)

    def read(self, stream, data):
        idx, = struct.unpack(data._byte_order + 'i', stream.read(4))
        if data.version >= 0x14010003:
            # idx is a palette index; -1 means the empty string
            if idx == -1:
                self._value = ''.encode("ascii")
                return
            try:
                self._value = data._string_list[idx]
            except IndexError:
                raise ValueError('string index too large (%i)'%idx)
        else:
            # idx is the byte length of the inline string
            if idx > 10000:
                raise ValueError('string too long (0x%08X at 0x%08X)'
                                 % (idx, stream.tell()))
            self._value = stream.read(idx)

    def write(self, stream, data):
        if data.version >= 0x14010003:
            if not self._value:
                # -1 marks the empty string
                stream.write(
                    struct.pack(data._byte_order + 'i', -1))
                return
            try:
                index = data._string_list.index(self._value)
            except ValueError:
                raise ValueError(
                    "string '%s' not in string list" % self._value)
            stream.write(struct.pack(data._byte_order + 'i', index))
        else:
            stream.write(struct.pack(data._byte_order + 'I',
                                     len(self._value)))
            stream.write(self._value)

    def get_strings(self, data):
        # Expose the value so it can be collected into the palette.
        return [self._value] if self._value else []

    def get_hash(self, data=None):
        return self.get_value()
917 918 # other types with internal implementation 919
class FilePath(string):
    """A file path stored as a nif string; paths are compared
    case-insensitively."""

    def get_hash(self, data=None):
        """Returns a case insensitive hash value."""
        path = self.get_value()
        return path.lower()
925
926 - class ByteArray(BasicBase):
927 """Array (list) of bytes. Implemented as basic type to speed up reading 928 and also to prevent data to be dumped by __str__."""
929 - def __init__(self, **kwargs):
930 BasicBase.__init__(self, **kwargs) 931 self.set_value("".encode()) # b'' for > py25
932
933 - def get_value(self):
934 return self._value
935
936 - def set_value(self, value):
938
939 - def get_size(self, data=None):
940 return len(self._value) + 4
941
942 - def get_hash(self, data=None):
943 return self._value.__hash__()
944
945 - def read(self, stream, data):
946 size, = struct.unpack(data._byte_order + 'I', 947 stream.read(4)) 948 self._value = stream.read(size)
949
950 - def write(self, stream, data):
951 stream.write(struct.pack(data._byte_order + 'I', 952 len(self._value))) 953 stream.write(self._value)
954
955 - def __str__(self):
956 return "< %i Bytes >" % len(self._value)
957
class ByteMatrix(BasicBase):
    """Matrix of bytes. Implemented as basic type to speed up reading
    and to prevent data being dumped by __str__."""

    def __init__(self, **kwargs):
        BasicBase.__init__(self, **kwargs)
        self.set_value([])

    def get_value(self):
        return self._value

    def set_value(self, value):
        # BUGFIX: validate with explicit exceptions instead of assert,
        # so the checks survive running python with -O.
        if not isinstance(value, list):
            raise TypeError(
                "expected a list of rows but got %s"
                % value.__class__.__name__)
        if value:
            size1 = len(value[0])
            for x in value:
                # TODO fix this for py3k
                #assert(isinstance(x, basestring))
                if len(x) != size1:
                    raise ValueError(
                        "all rows must have the same length")
        self._value = value # should be a list of strings of bytes

    def get_size(self, data=None):
        # 8 bytes for the two dimension words, plus the payload.
        if len(self._value) == 0:
            return 8
        else:
            return len(self._value) * len(self._value[0]) + 8

    def get_hash(self, data=None):
        return tuple(x.__hash__() for x in self._value)

    def read(self, stream, data):
        # Layout: row width, then row count, then the raw rows.
        size1, = struct.unpack(data._byte_order + 'I',
                               stream.read(4))
        size2, = struct.unpack(data._byte_order + 'I',
                               stream.read(4))
        self._value = []
        for i in xrange(size2):
            self._value.append(stream.read(size1))

    def write(self, stream, data):
        if self._value:
            stream.write(struct.pack(data._byte_order + 'I',
                                     len(self._value[0])))
        else:
            stream.write(struct.pack(data._byte_order + 'I', 0))
        stream.write(struct.pack(data._byte_order + 'I',
                                 len(self._value)))
        for x in self._value:
            stream.write(x)

    def __str__(self):
        # Do not dump the data itself, only its dimensions.
        size1 = len(self._value[0]) if self._value else 0
        size2 = len(self._value)
        return "< %ix%i Bytes >" % (size2, size1)
@classmethod
def vercondFilter(cls, expression):
    """Map a version-condition token to an attribute name, or to a
    version integer for literal version strings.

    :raise ``ValueError``: If the expression is not recognized.
    """
    # the three named tokens map directly to Data attributes
    named_tokens = {
        "Version": "version",
        "User Version": "user_version",
        "User Version 2": "user_version2",
    }
    if expression in named_tokens:
        return named_tokens[expression]
    # otherwise the expression must be a literal version string
    ver = cls.version_number(expression)
    if ver < 0:
        # not supported?
        raise ValueError(
            "cannot recognize version expression '%s'" % expression)
    return ver
1027 1028 @staticmethod
1029 - def version_number(version_str):
1030 """Converts version string into an integer. 1031 1032 :param version_str: The version string. 1033 :type version_str: str 1034 :return: A version integer. 1035 1036 >>> hex(NifFormat.version_number('3.14.15.29')) 1037 '0x30e0f1d' 1038 >>> hex(NifFormat.version_number('1.2')) 1039 '0x1020000' 1040 >>> hex(NifFormat.version_number('3.03')) 1041 '0x3000300' 1042 >>> hex(NifFormat.version_number('NS')) 1043 '0xa010000' 1044 """ 1045 1046 # 3.03 case is special 1047 if version_str == '3.03': 1048 return 0x03000300 1049 1050 # NS (neosteam) case is special 1051 if version_str == 'NS': 1052 return 0x0A010000 1053 1054 try: 1055 ver_list = [int(x) for x in version_str.split('.')] 1056 except ValueError: 1057 return -1 # version not supported (i.e. version_str '10.0.1.3a' would trigger this) 1058 if len(ver_list) > 4 or len(ver_list) < 1: 1059 return -1 # version not supported 1060 for ver_digit in ver_list: 1061 if (ver_digit | 0xff) > 0xff: 1062 return -1 # version not supported 1063 while len(ver_list) < 4: ver_list.append(0) 1064 return (ver_list[0] << 24) + (ver_list[1] << 16) + (ver_list[2] << 8) + ver_list[3]
1065 1066 # exceptions
class NifError(Exception):
    """Standard exception raised for malformed or inconsistent nif data."""
1070
class Data(pyffi.object_models.FileFormat.Data):
    """A class to contain the actual nif data.

    Note that L{header} and L{blocks} are not automatically kept
    in sync with the rest of the nif data, but they are
    resynchronized when calling L{write}.

    :ivar version: The nif version.
    :type version: ``int``
    :ivar user_version: The nif user version.
    :type user_version: ``int``
    :ivar user_version2: The nif user version 2.
    :type user_version2: ``int``
    :ivar roots: List of root blocks.
    :type roots: ``list`` of L{NifFormat.NiObject}
    :ivar header: The nif header.
    :type header: L{NifFormat.Header}
    :ivar blocks: List of blocks.
    :type blocks: ``list`` of L{NifFormat.NiObject}
    :ivar modification: Neo Steam ("neosteam") or Ndoors ("ndoors") or Joymaster Interactive Howling Sword ("jmihs1") or Laxe Lore ("laxelore") style nif?
    :type modification: ``str``
    """

    # transient state used while reading/writing (reset by read()/write())
    _link_stack = None        # indices of links still to be resolved
    _block_dct = None         # maps block index to block (used by read)
    _string_list = None       # the header string table being built/used
    _block_index_dct = None   # maps block to block index (used by write)
class VersionUInt(pyffi.object_models.common.UInt):
    """An unsigned int which additionally accepts ``None``
    (meaning: version not set)."""

    def set_value(self, value):
        """Store ``None`` as-is; delegate anything else to UInt validation."""
        if value is None:
            self._value = None
        else:
            pyffi.object_models.common.UInt.set_value(self, value)

    def __str__(self):
        """Hexadecimal rendering, or ``"None"`` when unset."""
        return "None" if self._value is None else "0x%08X" % self.get_value()

    def get_detail_display(self):
        """Detail-tree display mirrors __str__."""
        return self.__str__()
1114
def __init__(self, version=0x04000002, user_version=0, user_version2=0):
    """Initialize nif data. By default, this creates an empty
    nif document of the given version and user version.

    :param version: The version.
    :type version: ``int``
    :param user_version: The user version.
    :type user_version: ``int``
    :param user_version2: The second user version.
    :type user_version2: ``int``
    """
    # the version numbers are stored outside the header structure
    def make_version_holder(number):
        # wrap a plain int in a VersionUInt basic type
        holder = self.VersionUInt()
        holder.set_value(number)
        return holder

    self._version_value_ = make_version_holder(version)
    self._user_version_value_ = make_version_holder(user_version)
    self._user_version_2_value_ = make_version_holder(user_version2)
    # create new header
    self.header = NifFormat.Header()
    # empty list of root blocks (this encodes the footer)
    self.roots = []
    # empty list of blocks
    self.blocks = []
    # not a neosteam or ndoors nif
    self.modification = None
1139
def _getVersion(self):
    # accessor backing the ``version`` property below
    return self._version_value_.get_value()
def _setVersion(self, value):
    self._version_value_.set_value(value)

def _getUserVersion(self):
    # accessor backing the ``user_version`` property below
    return self._user_version_value_.get_value()
def _setUserVersion(self, value):
    self._user_version_value_.set_value(value)

def _getUserVersion2(self):
    # accessor backing the ``user_version2`` property below
    return self._user_version_2_value_.get_value()
def _setUserVersion2(self, value):
    self._user_version_2_value_.set_value(value)

# expose the three VersionUInt wrappers as plain int-valued properties
version = property(_getVersion, _setVersion)
user_version = property(_getUserVersion, _setUserVersion)
user_version2 = property(_getUserVersion2, _setUserVersion2)

# new functions
def inspect_version_only(self, stream):
    """This function checks the version only, and is faster
    than the usual inspect function (which reads the full
    header). Sets the L{version} and L{user_version} instance
    variables if the stream contains a valid nif file.

    Call this function if you simply wish to check that a file is
    a nif file without having to parse even the header.

    :raise ``ValueError``: If the stream does not contain a nif file.
    :param stream: The stream from which to read.
    :type stream: ``file``
    """
    pos = stream.tell()
    try:
        s = stream.readline(64).rstrip()
    finally:
        stream.seek(pos)
    self.modification = None
    # recognize the header line of the various nif dialects
    if s.startswith("NetImmerse File Format, Version ".encode("ascii")):
        version_str = s[32:].decode("ascii")
    elif s.startswith("Gamebryo File Format, Version ".encode("ascii")):
        version_str = s[30:].decode("ascii")
    elif s.startswith("NS".encode("ascii")):
        # neosteam
        version_str = "NS"
        self.modification = "neosteam"
    elif s.startswith("NDSNIF....@....@...., Version ".encode("ascii")):
        version_str = s[30:].decode("ascii")
        self.modification = "ndoors"
    elif s.startswith("Joymaster HS1 Object Format - (JMI), Version ".encode("ascii")):
        version_str = s[45:].decode("ascii")
        self.modification = "jmihs1"
    else:
        raise ValueError("Not a nif file.")
    try:
        ver = NifFormat.version_number(version_str)
    # BUGFIX: was a bare except, which also swallowed
    # KeyboardInterrupt/SystemExit
    except Exception:
        raise ValueError("Nif version %s not supported." % version_str)
    if not ver in NifFormat.versions.values():
        raise ValueError("Nif version %s not supported." % version_str)
    # check version integer and user version
    userver = 0
    userver2 = 0
    if ver >= 0x0303000D:
        ver_int = None
        try:
            stream.readline(64)
            ver_int, = struct.unpack('<I', stream.read(4))
            # special case for Laxe Lore
            if ver_int == 0x5A000004 and ver == 0x14000004:
                self.modification = "laxelore"
            # neosteam and ndoors have a special version integer
            elif (not self.modification) or self.modification == "jmihs1":
                if ver_int != ver:
                    raise ValueError(
                        "Corrupted nif file: header version string %s"
                        " does not correspond with header version field"
                        " 0x%08X." % (version_str, ver_int))
            elif self.modification == "neosteam":
                if ver_int != 0x08F35232:
                    raise ValueError(
                        "Corrupted nif file: invalid NeoSteam version.")
            elif self.modification == "ndoors":
                if ver_int != 0x73615F67:
                    raise ValueError(
                        "Corrupted nif file: invalid Ndoors version.")
            if ver >= 0x14000004:
                endian_type, = struct.unpack('<B', stream.read(1))
                if endian_type == 0:
                    # big endian!
                    self._byte_order = '>'
            if ver >= 0x0A010000:
                userver, = struct.unpack('<I', stream.read(4))
                if userver in (10, 11):
                    stream.read(4) # number of blocks
                    userver2, = struct.unpack('<I', stream.read(4))
        finally:
            # always rewind, even if a check above raised
            stream.seek(pos)
    self.version = ver
    self.user_version = userver
    self.user_version2 = userver2
1243 1244 # GlobalNode 1245
def get_global_child_nodes(self, edge_filter=EdgeFilter()):
    """Yield each root block as a global child node."""
    for root in self.roots:
        yield root
1248 1249 # DetailNode 1250
def replace_global_node(self, oldbranch, newbranch,
                        edge_filter=EdgeFilter()):
    """Replace oldbranch by newbranch: directly in the root list,
    and recursively within every other root's tree."""
    for index, root in enumerate(self.roots):
        if root is oldbranch:
            self.roots[index] = newbranch
            continue
        root.replace_global_node(oldbranch, newbranch,
                                 edge_filter=edge_filter)
1259
def get_detail_child_nodes(self, edge_filter=EdgeFilter()):
    """Yield the version fields and the header, in display order."""
    for node in (self._version_value_,
                 self._user_version_value_,
                 self._user_version_2_value_,
                 self.header):
        yield node
1265
def get_detail_child_names(self, edge_filter=EdgeFilter()):
    """Yield display names matching get_detail_child_nodes order."""
    for name in ("Version", "User Version", "User Version 2", "Header"):
        yield name

# overriding pyffi.object_models.FileFormat.Data methods
def inspect(self, stream):
    """Quickly checks whether the stream appears to contain
    nif data, and read the nif header. Resets stream to original position.

    Call this function if you only need to inspect the header of the nif.

    :param stream: The file to inspect.
    :type stream: ``file``
    """
    start = stream.tell()
    try:
        self.inspect_version_only(stream)
        self.header.read(stream, data=self)
    finally:
        # always rewind, even on failure
        stream.seek(start)
1289
def read(self, stream):
    """Read a nif file. Does not reset stream position.

    Populates L{header}, L{blocks} and L{roots}, and resolves all
    block links via fix_links at the end.

    :param stream: The stream from which to read.
    :type stream: ``file``
    """
    logger = logging.getLogger("pyffi.nif.data")
    # read header
    logger.debug("Reading header at 0x%08X" % stream.tell())
    self.inspect_version_only(stream)
    logger.debug("Version 0x%08X" % self.version)
    self.header.read(stream, data=self)

    # list of root blocks
    # for versions < 3.3.0.13 this list is updated through the
    # "Top Level Object" string while reading the blocks
    # for more recent versions, this list is updated at the end when the
    # footer is read
    self.roots = []

    # read the blocks
    self._link_stack = [] # list of indices, as they are added to the stack
    self._string_list = [s for s in self.header.strings]
    self._block_dct = {} # maps block index to actual block
    self.blocks = [] # records all blocks as read from file in order
    block_num = 0 # the current block numner

    while True:
        if self.version < 0x0303000D:
            # check if this is a 'Top Level Object'
            pos = stream.tell()
            top_level_str = NifFormat.SizedString()
            top_level_str.read(stream, data=self)
            top_level_str = str(top_level_str)
            if top_level_str == "Top Level Object":
                is_root = True
            else:
                is_root = False
                # not a root marker: rewind so the block parses normally
                stream.seek(pos)
        else:
            # signal as no root for now, roots are added when the footer
            # is read
            is_root = False

        # get block name
        if self.version >= 0x05000001:
            # note the 0xfff mask: required for the NiPhysX blocks
            block_type = self.header.block_types[
                self.header.block_type_index[block_num] & 0xfff]
            block_type = block_type.decode("ascii")
            # handle data stream classes
            if block_type.startswith("NiDataStream\x01"):
                # usage and access are encoded in the type string,
                # separated by \x01; applied to the block further down
                block_type, data_stream_usage, data_stream_access = block_type.split("\x01")
                data_stream_usage = int(data_stream_usage)
                data_stream_access = int(data_stream_access)
            # read dummy integer
            # bhk blocks are *not* preceeded by a dummy
            if self.version <= 0x0A01006A and not block_type.startswith("bhk"):
                dummy, = struct.unpack(self._byte_order + 'I',
                                       stream.read(4))
                if dummy != 0:
                    raise NifFormat.NifError(
                        'non-zero block tag 0x%08X at 0x%08X)'
                        %(dummy, stream.tell()))
        else:
            # old versions store the block type inline as a sized string
            block_type = NifFormat.SizedString()
            block_type.read(stream, self)
            block_type = block_type.get_value().decode("ascii")
        # get the block index
        if self.version >= 0x0303000D:
            # for these versions the block index is simply the block number
            block_index = block_num
        else:
            # earlier versions
            # the number of blocks is not in the header
            # and a special block type string marks the end of the file
            if block_type == "End Of File": break
            # read the block index, which is probably the memory
            # location of the object when it was written to
            # memory
            else:
                block_index, = struct.unpack(
                    self._byte_order + 'I', stream.read(4))
                if block_index in self._block_dct:
                    raise NifFormat.NifError(
                        'duplicate block index (0x%08X at 0x%08X)'
                        %(block_index, stream.tell()))
        # create the block
        try:
            block = getattr(NifFormat, block_type)()
        except AttributeError:
            raise ValueError(
                "Unknown block type '%s'." % block_type)
        logger.debug("Reading %s block at 0x%08X"
                     % (block_type, stream.tell()))
        # read the block
        try:
            block.read(stream, self)
        # NOTE(review): bare except is deliberate here - log and re-raise
        # whatever interrupted the block read
        except:
            logger.exception("Reading %s failed" % block.__class__)
            #logger.error("link stack: %s" % self._link_stack)
            #logger.error("block that failed:")
            #logger.error("%s" % block)
            raise
        # complete NiDataStream data
        if block_type == "NiDataStream":
            block.usage = data_stream_usage
            block.access.from_int(data_stream_access, self)
        # store block index
        self._block_dct[block_index] = block
        self.blocks.append(block)
        # check block size
        if self.version >= 0x14020007:
            logger.debug("Checking block size")
            calculated_size = block.get_size(data=self)
            if calculated_size != self.header.block_size[block_num]:
                extra_size = self.header.block_size[block_num] - calculated_size
                logger.error(
                    "Block size check failed: corrupt nif file "
                    "or bad nif.xml?")
                logger.error("Skipping %i bytes in %s"
                             % (extra_size, block.__class__.__name__))
                # skip bytes that were missed
                stream.seek(extra_size, 1)
        # add block to roots if flagged as such
        if is_root:
            self.roots.append(block)
        # check if we are done
        block_num += 1
        if self.version >= 0x0303000D:
            if block_num >= self.header.num_blocks:
                break

    # read footer
    ftr = NifFormat.Footer()
    ftr.read(stream, self)

    # check if we are at the end of the file
    if stream.read(1):
        logger.error(
            'End of file not reached: corrupt nif file?')

    # fix links in blocks and footer (header has no links)
    for block in self.blocks:
        block.fix_links(self)
    ftr.fix_links(self)
    # the link stack should be empty now
    if self._link_stack:
        raise NifFormat.NifError('not all links have been popped from the stack (bug?)')
    # add root objects in footer to roots list
    if self.version >= 0x0303000D:
        for root in ftr.roots:
            self.roots.append(root)
def write(self, stream):
    """Write a nif file. The L{header} and the L{blocks} are recalculated
    from the tree at L{roots} (e.g. list of block types, number of blocks,
    list of block types, list of strings, list of block sizes etc.).

    :param stream: The stream to which to write.
    :type stream: file
    """
    logger = logging.getLogger("pyffi.nif.data")
    # set up index and type dictionary
    self.blocks = [] # list of all blocks to be written
    self._block_index_dct = {} # maps block to block index
    block_type_list = [] # list of all block type strings
    block_type_dct = {} # maps block to block type string index
    self._string_list = []
    for root in self.roots:
        self._makeBlockList(root,
                            self._block_index_dct,
                            block_type_list, block_type_dct)
        for block in root.tree():
            self._string_list.extend(
                block.get_strings(self))
    self._string_list = list(set(self._string_list)) # ensure unique elements

    self.header.user_version = self.user_version # TODO dedicated type for user_version similar to FileVersion
    # for oblivion CS; apparently this is the version of the bhk blocks
    self.header.user_version_2 = self.user_version2
    self.header.num_blocks = len(self.blocks)
    self.header.num_block_types = len(block_type_list)
    self.header.block_types.update_size()
    for i, block_type in enumerate(block_type_list):
        self.header.block_types[i] = block_type
    self.header.block_type_index.update_size()
    for i, block in enumerate(self.blocks):
        self.header.block_type_index[i] = block_type_dct[block]
    self.header.num_strings = len(self._string_list)
    if self._string_list:
        self.header.max_string_length = max([len(s) for s in self._string_list])
    else:
        self.header.max_string_length = 0
    self.header.strings.update_size()
    for i, s in enumerate(self._string_list):
        self.header.strings[i] = s
    self.header.block_size.update_size()
    for i, block in enumerate(self.blocks):
        self.header.block_size[i] = block.get_size(data=self)

    # set up footer
    ftr = NifFormat.Footer()
    ftr.num_roots = len(self.roots)
    ftr.roots.update_size()
    for i, root in enumerate(self.roots):
        ftr.roots[i] = root

    # write the file
    logger.debug("Writing header")
    #logger.debug("%s" % self.header)
    self.header.write(stream, self)
    for block in self.blocks:
        # signal top level object if block is a root object
        if self.version < 0x0303000D and block in self.roots:
            s = NifFormat.SizedString()
            s.set_value("Top Level Object")
            s.write(stream, self)
        if self.version >= 0x05000001:
            if self.version <= 0x0A01006A:
                # write zero dummy separator
                stream.write('\x00\x00\x00\x00'.encode("ascii"))
        else:
            # write block type string
            s = NifFormat.SizedString()
            assert(block_type_list[block_type_dct[block]]
                   == block.__class__.__name__) # debug
            s.set_value(block.__class__.__name__)
            s.write(stream, self)
        # write block index
        logger.debug("Writing %s block" % block.__class__.__name__)
        if self.version < 0x0303000D:
            stream.write(struct.pack(self._byte_order + 'i',
                                     self._block_index_dct[block]))
        # write block
        block.write(stream, self)
    if self.version < 0x0303000D:
        s = NifFormat.SizedString()
        s.set_value("End Of File")
        # BUGFIX: SizedString.write requires the data argument; the
        # original s.write(stream) raised TypeError for every
        # pre-3.3.0.13 nif
        s.write(stream, self)
    ftr.write(stream, self)
1534
def _makeBlockList(
    self, root, block_index_dct, block_type_list, block_type_dct):
    """This is a helper function for write to set up the list of all blocks,
    the block index map, and the block type map.

    :param root: The root block, whose tree is to be added to
        the block list.
    :type root: L{NifFormat.NiObject}
    :param block_index_dct: Dictionary mapping blocks in self.blocks to
        their block index.
    :type block_index_dct: dict
    :param block_type_list: List of all block types.
    :type block_type_list: list of str
    :param block_type_dct: Dictionary mapping blocks in self.blocks to
        their block type index.
    :type block_type_dct: dict
    """

    def _blockChildBeforeParent(block):
        """Determine whether block comes before its parent or not, depending
        on the block type.

        @todo: Move to the L{NifFormat.Data} class.

        :param block: The block to test.
        :type block: L{NifFormat.NiObject}
        :return: ``True`` if child should come first, ``False`` otherwise.
        """
        return (isinstance(block, NifFormat.bhkRefObject)
                and not isinstance(block, NifFormat.bhkConstraint))

    # block already listed? if so, return
    if root in self.blocks:
        return
    # add block type to block type dictionary
    block_type = root.__class__.__name__
    # special case: NiDataStream stores part of data in block type list
    if block_type == "NiDataStream":
        block_type = ("NiDataStream\x01%i\x01%i"
                      % (root.usage, root.access.to_int(self)))
    try:
        block_type_dct[root] = block_type_list.index(block_type)
    except ValueError:
        # first time we see this type: append it to the type table
        block_type_dct[root] = len(block_type_list)
        block_type_list.append(block_type)

    # special case: add bhkConstraint entities before bhkConstraint
    # (these are actually links, not refs)
    if isinstance(root, NifFormat.bhkConstraint):
        for entity in root.entities:
            self._makeBlockList(
                entity, block_index_dct, block_type_list, block_type_dct)

    children_left = []
    # add children that come before the block
    # store any remaining children in children_left (processed later)
    for child in root.get_refs(data=self):
        if _blockChildBeforeParent(child):
            self._makeBlockList(
                child, block_index_dct, block_type_list, block_type_dct)
        else:
            children_left.append(child)

    # add the block
    if self.version >= 0x0303000D:
        block_index_dct[root] = len(self.blocks)
    else:
        # old versions identify blocks by an arbitrary unique integer;
        # id() provides one
        block_index_dct[root] = id(root)
    self.blocks.append(root)

    # add children that come after the block
    for child in children_left:
        self._makeBlockList(
            child, block_index_dct, block_type_list, block_type_dct)
1609 1610 # extensions of generated structures 1611
class Footer:
    """Footer extension: NeoSteam nifs carry one extra trailing zero byte."""

    def read(self, stream, data):
        """Read the footer; for NeoSteam also consume and validate the
        extra trailing byte."""
        StructBase.read(self, stream, data)
        if getattr(data, 'modification', None) != "neosteam":
            return
        trailer, = struct.unpack("<B", stream.read(1))
        if trailer != 0:
            raise ValueError(
                "Expected trailing zero byte in footer, "
                "but got %i instead." % trailer)

    def write(self, stream, data):
        """Write the footer; for NeoSteam also append the trailing
        zero byte."""
        StructBase.write(self, stream, data)
        if getattr(data, 'modification', None) == "neosteam":
            stream.write("\x00".encode("ascii"))
1628 1629
class Header:
    def has_block_type(self, block_type):
        """Check if header has a particular block type.

        :raise ``ValueError``: If number of block types is zero
            (only nif versions 10.0.1.0 and up store block types
            in header).

        :param block_type: The block type.
        :type block_type: L{NifFormat.NiObject}
        :return: ``True`` if the header's list of block types has the given
            block type, or a subclass of it. ``False`` otherwise.
        :rtype: ``bool``
        """
        # check if we can check the block types at all
        if self.num_block_types == 0:
            raise ValueError("header does not store any block types")
        # fast path: exact name match on the raw byte strings
        if block_type.__name__.encode() in self.block_types:
            return True
        # slow path: resolve each stored name and walk the class hierarchy
        for raw_name in self.block_types:
            name = raw_name.decode("ascii")
            # NiDataStreams are special
            if name.startswith("NiDataStream\x01"):
                name = "NiDataStream"
            if issubclass(getattr(NifFormat, name), block_type):
                return True
        # requested block type is not in nif
        return False
1660
1661 - class Matrix33:
1662 - def as_list(self):
1663 """Return matrix as 3x3 list.""" 1664 return [ 1665 [self.m_11, self.m_12, self.m_13], 1666 [self.m_21, self.m_22, self.m_23], 1667 [self.m_31, self.m_32, self.m_33] 1668 ]
1669
1670 - def as_tuple(self):
1671 """Return matrix as 3x3 tuple.""" 1672 return ( 1673 (self.m_11, self.m_12, self.m_13), 1674 (self.m_21, self.m_22, self.m_23), 1675 (self.m_31, self.m_32, self.m_33) 1676 )
1677
1678 - def __str__(self):
1679 return ( 1680 "[ %6.3f %6.3f %6.3f ]\n" 1681 "[ %6.3f %6.3f %6.3f ]\n" 1682 "[ %6.3f %6.3f %6.3f ]\n" 1683 % (self.m_11, self.m_12, self.m_13, 1684 self.m_21, self.m_22, self.m_23, 1685 self.m_31, self.m_32, self.m_33))
1686
1687 - def set_identity(self):
1688 """Set to identity matrix.""" 1689 self.m_11 = 1.0 1690 self.m_12 = 0.0 1691 self.m_13 = 0.0 1692 self.m_21 = 0.0 1693 self.m_22 = 1.0 1694 self.m_23 = 0.0 1695 self.m_31 = 0.0 1696 self.m_32 = 0.0 1697 self.m_33 = 1.0
1698
def is_identity(self):
    """Return ``True`` if the matrix is close to identity
    (every entry within NifFormat.EPSILON of its target)."""
    eps = NifFormat.EPSILON
    for row in (1, 2, 3):
        for col in (1, 2, 3):
            target = 1.0 if row == col else 0.0
            if abs(getattr(self, "m_%i%i" % (row, col)) - target) > eps:
                return False
    return True
1713
def get_copy(self):
    """Return a copy of the matrix."""
    mat = NifFormat.Matrix33()
    for name in ("m_11", "m_12", "m_13",
                 "m_21", "m_22", "m_23",
                 "m_31", "m_32", "m_33"):
        setattr(mat, name, getattr(self, name))
    return mat
1727
def get_transpose(self):
    """Get transposed of the matrix."""
    mat = NifFormat.Matrix33()
    for row in (1, 2, 3):
        for col in (1, 2, 3):
            # entry (col, row) of the result is entry (row, col) of self
            setattr(mat, "m_%i%i" % (col, row),
                    getattr(self, "m_%i%i" % (row, col)))
    return mat
1741
def is_scale_rotation(self):
    """Returns true if the matrix decomposes nicely into scale * rotation."""
    # NOTE: 0.01 instead of NifFormat.EPSILON to work around bad nif files

    # for M = scale * rotation with rotation orthogonal:
    # M * M^T = scale^2 * rotation * rotation^T = scale^2 * identity
    gram = self * self.get_transpose()

    # off diagonal elements should be zero
    off_diagonal = (abs(gram.m_12) + abs(gram.m_13)
                    + abs(gram.m_21) + abs(gram.m_23)
                    + abs(gram.m_31) + abs(gram.m_32))
    if off_diagonal > 0.01:
        return False

    # diagonal elements should be equal (to scale^2)
    return abs(gram.m_11 - gram.m_22) + abs(gram.m_22 - gram.m_33) <= 0.01
1765
def is_rotation(self):
    """Returns ``True`` if the matrix is a rotation matrix
    (a member of SO(3))."""
    # NOTE: 0.01 instead of NifFormat.EPSILON to work around bad nif files
    # rotation = scale-rotation with unit determinant
    return (self.is_scale_rotation()
            and abs(self.get_determinant() - 1.0) <= 0.01)
1776
1777 - def get_determinant(self):
1778 """Return determinant.""" 1779 return (self.m_11*self.m_22*self.m_33 1780 +self.m_12*self.m_23*self.m_31 1781 +self.m_13*self.m_21*self.m_32 1782 -self.m_31*self.m_22*self.m_13 1783 -self.m_21*self.m_12*self.m_33 1784 -self.m_11*self.m_32*self.m_23)
1785
1786 - def get_scale(self):
1787 """Gets the scale (assuming is_scale_rotation is true!).""" 1788 scale = self.get_determinant() 1789 if scale < 0: 1790 return -((-scale)**(1.0/3.0)) 1791 else: 1792 return scale**(1.0/3.0)
1793
def get_scale_rotation(self):
    """Decompose the matrix into scale and rotation, where scale is a float
    and rotation is a C{Matrix33}. Returns a pair (scale, rotation)."""
    rotation = self.get_copy()
    scale = self.get_scale()
    if abs(scale) < NifFormat.EPSILON:
        raise ZeroDivisionError('scale is zero, unable to obtain rotation')
    # dividing out the scale leaves the pure rotation part
    rotation /= scale
    return (scale, rotation)
1803
def set_scale_rotation(self, scale, rotation):
    """Compose the matrix as the product of scale * rotation."""
    if not isinstance(scale, (float, int, long)):
        raise TypeError('scale must be float')
    if not isinstance(rotation, NifFormat.Matrix33):
        raise TypeError('rotation must be Matrix33')

    if not rotation.is_rotation():
        raise ValueError('rotation must be rotation matrix')

    for name in ("m_11", "m_12", "m_13",
                 "m_21", "m_22", "m_23",
                 "m_31", "m_32", "m_33"):
        setattr(self, name, getattr(rotation, name) * scale)
1823
def get_scale_quat(self):
    """Decompose matrix into scale and quaternion.

    :return: A (scale, quaternion) pair, with quaternion a
        L{NifFormat.Quaternion}.
    """
    scale, rot = self.get_scale_rotation()
    quat = NifFormat.Quaternion()
    trace = 1.0 + rot.m_11 + rot.m_22 + rot.m_33

    # branch on the numerically safest divisor: the trace if it is
    # positive, otherwise the largest diagonal entry
    if trace > NifFormat.EPSILON:
        s = (trace ** 0.5) * 2
        quat.x = -( rot.m_32 - rot.m_23 ) / s
        quat.y = -( rot.m_13 - rot.m_31 ) / s
        quat.z = -( rot.m_21 - rot.m_12 ) / s
        quat.w = 0.25 * s
    elif rot.m_11 > max((rot.m_22, rot.m_33)):
        s = (( 1.0 + rot.m_11 - rot.m_22 - rot.m_33 ) ** 0.5) * 2
        quat.x = 0.25 * s
        quat.y = (rot.m_21 + rot.m_12 ) / s
        quat.z = (rot.m_13 + rot.m_31 ) / s
        quat.w = -(rot.m_32 - rot.m_23 ) / s
    elif rot.m_22 > rot.m_33:
        s = (( 1.0 + rot.m_22 - rot.m_11 - rot.m_33 ) ** 0.5) * 2
        quat.x = (rot.m_21 + rot.m_12 ) / s
        quat.y = 0.25 * s
        quat.z = (rot.m_32 + rot.m_23 ) / s
        quat.w = -(rot.m_13 - rot.m_31 ) / s
    else:
        s = (( 1.0 + rot.m_33 - rot.m_11 - rot.m_22 ) ** 0.5) * 2
        quat.x = (rot.m_13 + rot.m_31 ) / s
        quat.y = (rot.m_32 + rot.m_23 ) / s
        quat.z = 0.25 * s
        quat.w = -(rot.m_21 - rot.m_12 ) / s

    return scale, quat
1856 1857
def get_inverse(self):
    """Get inverse (assuming is_scale_rotation is true!)."""
    # for M = scale * R: M^-1 = M^T / scale^2; the squared first row
    # length equals scale^2 for a scale-rotation matrix
    scale_squared = self.m_11 ** 2 + self.m_12 ** 2 + self.m_13 ** 2
    return self.get_transpose() / scale_squared
1863
def __mul__(self, rhs):
    """Multiply by a scalar or another Matrix33; a Vector3 on the
    right is rejected (use vector * matrix instead)."""
    if isinstance(rhs, (float, int, long)):
        # scalar: scale every entry
        mat = NifFormat.Matrix33()
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                name = "m_%i%i" % (row, col)
                setattr(mat, name, getattr(self, name) * rhs)
        return mat
    elif isinstance(rhs, NifFormat.Vector3):
        raise TypeError(
            "matrix*vector not supported; "
            "please use left multiplication (vector*matrix)")
    elif isinstance(rhs, NifFormat.Matrix33):
        # matrix product: entry (i, j) is row i of self dot column j of rhs
        mat = NifFormat.Matrix33()
        for i in (1, 2, 3):
            for j in (1, 2, 3):
                setattr(mat, "m_%i%i" % (i, j),
                        sum(getattr(self, "m_%i%i" % (i, k))
                            * getattr(rhs, "m_%i%i" % (k, j))
                            for k in (1, 2, 3)))
        return mat
    else:
        raise TypeError(
            "do not know how to multiply Matrix33 with %s"%rhs.__class__)
1896
def __div__(self, rhs):
    """Divide every entry by a scalar; any other operand raises."""
    if isinstance(rhs, (float, int, long)):
        mat = NifFormat.Matrix33()
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                name = "m_%i%i" % (row, col)
                setattr(mat, name, getattr(self, name) / rhs)
        return mat
    raise TypeError(
        "do not know how to divide Matrix33 by %s"%rhs.__class__)

# py3k
__truediv__ = __div__
def __rmul__(self, lhs):
    """Scalar * matrix; delegates to __mul__ since scaling commutes."""
    if isinstance(lhs, (float, int, long)):
        return self.__mul__(lhs)
    raise TypeError(
        "do not know how to multiply %s with Matrix33"%lhs.__class__)
1923
def __eq__(self, mat):
    """Approximate equality test, entry-wise within NifFormat.EPSILON.

    Comparing against None returns False; any other non-Matrix33
    operand raises TypeError.
    """
    if mat is None:
        # consistency fix: Vector3.__eq__ and Vector4.__eq__ both treat
        # a None operand as "not equal" instead of raising TypeError,
        # so `matrix == None` should behave the same way
        return False
    if not isinstance(mat, NifFormat.Matrix33):
        raise TypeError(
            "do not know how to compare Matrix33 and %s"%mat.__class__)
    for row in (1, 2, 3):
        for col in (1, 2, 3):
            name = "m_%i%i" % (row, col)
            if abs(getattr(self, name) - getattr(mat, name)) \
               > NifFormat.EPSILON:
                return False
    return True
1939
def __ne__(self, mat):
    """Inverse of __eq__ (raises the same TypeError for bad operand types)."""
    return not self.__eq__(mat)
1942
def __sub__(self, x):
    """Entry-wise subtraction of another matrix or of a scalar."""
    if isinstance(x, (NifFormat.Matrix33)):
        # subtract the matching entry
        rhs_entry = lambda name: getattr(x, name)
    elif isinstance(x, (int, long, float)):
        # subtract the same scalar from every entry
        rhs_entry = lambda name: x
    else:
        raise TypeError("do not know how to substract Matrix33 and %s"
                        % x.__class__)
    result = NifFormat.Matrix33()
    for row in (1, 2, 3):
        for col in (1, 2, 3):
            name = "m_%i%i" % (row, col)
            setattr(result, name, getattr(self, name) - rhs_entry(name))
    return result
1971
def sup_norm(self):
    """Calculate supremum norm of matrix (maximum absolute value of all
    entries)."""
    return max(abs(entry)
               for row in self.as_list()
               for entry in row)
1977
class Vector3:
    """A vector of three floats (x, y, z) with basic linear algebra:
    addition, scaling, dot product, cross product, and (left) matrix
    multiplication."""

    def _mapped(self, func):
        # internal helper: new vector with func applied to each component
        vec = NifFormat.Vector3()
        vec.x = func(self.x)
        vec.y = func(self.y)
        vec.z = func(self.z)
        return vec

    def _zipped(self, other, func):
        # internal helper: new vector combining matching components
        vec = NifFormat.Vector3()
        vec.x = func(self.x, other.x)
        vec.y = func(self.y, other.y)
        vec.z = func(self.z, other.z)
        return vec

    def as_list(self):
        """Return the coordinates as a list [x, y, z]."""
        return [self.x, self.y, self.z]

    def as_tuple(self):
        """Return the coordinates as a tuple (x, y, z)."""
        return (self.x, self.y, self.z)

    def norm(self, sqrt=math.sqrt):
        """Return the Euclidean length of this vector."""
        return sqrt(self.x*self.x + self.y*self.y + self.z*self.z)

    def normalize(self, ignore_error=False, sqrt=math.sqrt):
        """Rescale this vector in place to unit length.

        :param ignore_error: if True, a zero vector is left untouched
            instead of raising ZeroDivisionError.
        """
        # norm() is inlined here to reduce call overhead
        try:
            factor = 1.0 / sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
        except ZeroDivisionError:
            if ignore_error:
                return
            raise
        self.x *= factor
        self.y *= factor
        self.z *= factor

    def normalized(self, ignore_error=False):
        """Return a unit length copy of this vector."""
        vec = self.get_copy()
        vec.normalize(ignore_error=ignore_error)
        return vec

    def get_copy(self):
        """Return a new vector with identical coordinates."""
        return self._mapped(lambda comp: comp)

    def __str__(self):
        return "[ %6.3f %6.3f %6.3f ]"%(self.x, self.y, self.z)

    def __mul__(self, x):
        """Scalar scaling, dot product, or row-vector times matrix."""
        if isinstance(x, (float, int, long)):
            return self._mapped(lambda comp: comp * x)
        elif isinstance(x, NifFormat.Vector3):
            # dot product
            return self.x * x.x + self.y * x.y + self.z * x.z
        elif isinstance(x, NifFormat.Matrix33):
            # row vector times 3x3 matrix
            vec = NifFormat.Vector3()
            vec.x = self.x * x.m_11 + self.y * x.m_21 + self.z * x.m_31
            vec.y = self.x * x.m_12 + self.y * x.m_22 + self.z * x.m_32
            vec.z = self.x * x.m_13 + self.y * x.m_23 + self.z * x.m_33
            return vec
        elif isinstance(x, NifFormat.Matrix44):
            # apply the rotation part, then the translation
            return self * x.get_matrix_33() + x.get_translation()
        else:
            raise TypeError("do not know how to multiply Vector3 with %s"%x.__class__)

    def __rmul__(self, x):
        if isinstance(x, (float, int, long)):
            return self._mapped(lambda comp: x * comp)
        else:
            raise TypeError("do not know how to multiply %s and Vector3"%x.__class__)

    def __div__(self, x):
        if isinstance(x, (float, int, long)):
            return self._mapped(lambda comp: comp / x)
        else:
            raise TypeError("do not know how to divide Vector3 and %s"%x.__class__)

    # py3k
    __truediv__ = __div__

    def __add__(self, x):
        if isinstance(x, (float, int, long)):
            return self._mapped(lambda comp: comp + x)
        elif isinstance(x, NifFormat.Vector3):
            return self._zipped(x, lambda a, b: a + b)
        else:
            raise TypeError("do not know how to add Vector3 and %s"%x.__class__)

    def __radd__(self, x):
        if isinstance(x, (float, int, long)):
            return self._mapped(lambda comp: x + comp)
        else:
            raise TypeError("do not know how to add %s and Vector3"%x.__class__)

    def __sub__(self, x):
        if isinstance(x, (float, int, long)):
            return self._mapped(lambda comp: comp - x)
        elif isinstance(x, NifFormat.Vector3):
            return self._zipped(x, lambda a, b: a - b)
        else:
            raise TypeError("do not know how to substract Vector3 and %s"%x.__class__)

    def __rsub__(self, x):
        if isinstance(x, (float, int, long)):
            return self._mapped(lambda comp: x - comp)
        else:
            raise TypeError("do not know how to substract %s and Vector3"%x.__class__)

    def __neg__(self):
        return self._mapped(lambda comp: -comp)

    # cross product
    def crossproduct(self, x):
        """Return the cross product of this vector with x."""
        if not isinstance(x, NifFormat.Vector3):
            raise TypeError("do not know how to calculate crossproduct of Vector3 and %s"%x.__class__)
        vec = NifFormat.Vector3()
        vec.x = self.y*x.z - self.z*x.y
        vec.y = self.z*x.x - self.x*x.z
        vec.z = self.x*x.y - self.y*x.x
        return vec

    def __eq__(self, x):
        """Approximate equality, per component within NifFormat.EPSILON;
        comparing with None yields False."""
        if x is None:
            return False
        if not isinstance(x, NifFormat.Vector3):
            raise TypeError("do not know how to compare Vector3 and %s"%x.__class__)
        return (abs(self.x - x.x) <= NifFormat.EPSILON
                and abs(self.y - x.y) <= NifFormat.EPSILON
                and abs(self.z - x.z) <= NifFormat.EPSILON)

    def __ne__(self, x):
        return not self.__eq__(x)
class Vector4:
    """
    >>> from pyffi.formats.nif import NifFormat
    >>> vec = NifFormat.Vector4()
    >>> vec.x = 1.0
    >>> vec.y = 2.0
    >>> vec.z = 3.0
    >>> vec.w = 4.0
    >>> print(vec)
    [  1.000  2.000  3.000  4.000 ]
    >>> vec.as_list()
    [1.0, 2.0, 3.0, 4.0]
    >>> vec.as_tuple()
    (1.0, 2.0, 3.0, 4.0)
    >>> print(vec.get_vector_3())
    [  1.000  2.000  3.000 ]
    >>> vec2 = NifFormat.Vector4()
    >>> vec == vec2
    False
    >>> vec2.x = 1.0
    >>> vec2.y = 2.0
    >>> vec2.z = 3.0
    >>> vec2.w = 4.0
    >>> vec == vec2
    True
    """

    def as_list(self):
        """Return the coordinates as a list [x, y, z, w]."""
        return [self.x, self.y, self.z, self.w]

    def as_tuple(self):
        """Return the coordinates as a tuple (x, y, z, w)."""
        return (self.x, self.y, self.z, self.w)

    def get_copy(self):
        """Return a new Vector4 with identical components."""
        vec = NifFormat.Vector4()
        vec.x, vec.y, vec.z, vec.w = self.x, self.y, self.z, self.w
        return vec

    def get_vector_3(self):
        """Return the x, y, z part as a Vector3 (w is dropped)."""
        vec = NifFormat.Vector3()
        vec.x, vec.y, vec.z = self.x, self.y, self.z
        return vec

    def __str__(self):
        return "[ %6.3f %6.3f %6.3f %6.3f ]"%(self.x, self.y, self.z, self.w)

    def __eq__(self, rhs):
        """Approximate component-wise equality within NifFormat.EPSILON;
        comparing with None yields False."""
        if rhs is None:
            return False
        if not isinstance(rhs, NifFormat.Vector4):
            raise TypeError(
                "do not know how to compare Vector4 and %s" % rhs.__class__)
        return all(abs(a - b) <= NifFormat.EPSILON
                   for a, b in zip(self.as_list(), rhs.as_list()))

    def __ne__(self, rhs):
        return not self.__eq__(rhs)
2208
class SkinPartition:
    def get_triangles(self):
        """Get list of triangles of this partition.
        """
        if self.num_strips:
            # stripified geometry: unpack the strips into triangles
            for tri in pyffi.utils.tristrip.triangulate(self.strips):
                yield tri
        else:
            # plain triangle list
            for tri in self.triangles:
                yield (tri.v_1, tri.v_2, tri.v_3)

    def get_mapped_triangles(self):
        """Get list of triangles of this partition (mapping into the
        geometry data vertex list).
        """
        vertex_map = self.vertex_map
        for tri in self.get_triangles():
            yield tuple(vertex_map[v_index] for v_index in tri)
2228
class bhkBoxShape:
    def apply_scale(self, scale):
        """Apply scale factor C{scale} on data."""
        dims = self.dimensions
        dims.x *= scale
        dims.y *= scale
        dims.z *= scale
        self.minimum_size *= scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # the dimensions are half-extents, so double them to get the
        # full edge lengths of the box
        full_extents = (self.dimensions.x * 2,
                        self.dimensions.y * 2,
                        self.dimensions.z * 2)
        mass, inertia = pyffi.utils.inertia.getMassInertiaBox(
            full_extents, density = density, solid = solid)
        return mass, (0,0,0), inertia
2246
class bhkCapsuleShape:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # apply scale on dimensions
        self.radius *= scale
        self.radius_1 *= scale
        self.radius_2 *= scale
        self.first_point.x *= scale
        self.first_point.y *= scale
        self.first_point.z *= scale
        self.second_point.x *= scale
        self.second_point.y *= scale
        self.second_point.z *= scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # (assumes self.radius == self.radius_1 == self.radius_2)
        length = (self.first_point - self.second_point).norm()
        mass, inertia = pyffi.utils.inertia.getMassInertiaCapsule(
            radius = self.radius, length = length,
            density = density, solid = solid)
        # now fix inertia so it is expressed in the right coordinates
        # need a transform that maps (0,0,length/2) on (second - first) / 2
        # and (0,0,-length/2) on (first - second)/2
        # vec1 is the unit vector along the capsule axis
        vec1 = ((self.second_point - self.first_point) / length).as_tuple()
        # find an orthogonal vector to vec1: cross vec1 with the basis
        # vector of its smallest component (most orthogonal choice)
        index = min(enumerate(vec1), key=lambda val: abs(val[1]))[0]
        vec2 = vecCrossProduct(vec1, tuple((1 if i == index else 0)
                                           for i in xrange(3)))
        vec2 = vecscalarMul(vec2, 1/vecNorm(vec2))
        # find an orthogonal vector to vec1 and vec2
        vec3 = vecCrossProduct(vec1, vec2)
        # get transform matrix
        transform_transposed = (vec2, vec3, vec1) # this is effectively the transposed of our transform
        transform = matTransposed(transform_transposed)
        # check the result (debug)
        # NOTE(review): these asserts are stripped under python -O
        assert(vecDistance(matvecMul(transform, (0,0,1)), vec1) < 0.0001)
        assert(abs(matDeterminant(transform) - 1) < 0.0001)
        # transform the inertia tensor: R^T * I * R
        inertia = matMul(matMul(transform_transposed, inertia), transform)
        # center of mass is the midpoint of the two capsule end points
        return (mass,
                ((self.first_point + self.second_point) * 0.5).as_tuple(),
                inertia)
2290
class bhkConstraint:
    def get_transform_a_b(self, parent):
        """Returns the transform of the first entity relative to the second
        entity. Root is simply a nif block that is a common parent to both
        blocks."""
        # a constraint must connect exactly two entities
        if self.num_entities != 2:
            raise ValueError(
                "cannot get tranform for constraint "
                "that hasn't exactly 2 entities")
        # locate both entities below the common parent
        chain_a = parent.find_chain(self.entities[0])
        chain_b = parent.find_chain(self.entities[1])
        # each chain must end in node -> collision object -> rigid body
        for chain in (chain_a, chain_b):
            assert(isinstance(chain[-1], NifFormat.bhkRigidBody))
            assert(isinstance(chain[-2], NifFormat.NiCollisionObject))
            assert(isinstance(chain[-3], NifFormat.NiNode))
        # transform of A relative to B
        transform_a = chain_a[-3].get_transform(relative_to = parent)
        transform_b = chain_b[-3].get_transform(relative_to = parent)
        return transform_a * transform_b.get_inverse()
2316
class bhkConvexVerticesShape:
    def apply_scale(self, scale):
        """Apply scale factor on data."""
        if abs(scale - 1.0) < NifFormat.EPSILON:
            # nothing to do
            return
        for vert in self.vertices:
            vert.x *= scale
            vert.y *= scale
            vert.z *= scale
        for normal in self.normals:
            # only the w component is rescaled here
            # (presumably the plane offset — kept as original behavior)
            normal.w *= scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # enumerate all triangles making up the convex hull of the vertices
        vertices, triangles = pyffi.utils.quickhull.qhull3d(
            [vert.as_tuple() for vert in self.vertices])
        # now calculate mass, center, and inertia from the polyhedron
        return pyffi.utils.inertia.get_mass_center_inertia_polyhedron(
            vertices, triangles, density = density, solid = solid)
2336
class bhkLimitedHingeConstraint:
    def apply_scale(self, scale):
        """Scale data."""
        # scale both pivot points of the hinge description
        for pivot in (self.limited_hinge.pivot_a,
                      self.limited_hinge.pivot_b):
            pivot.x *= scale
            pivot.y *= scale
            pivot.z *= scale

    def update_a_b(self, parent):
        """Update the B data from the A data. The parent argument is simply a
        common parent to the entities."""
        self.limited_hinge.update_a_b(self.get_transform_a_b(parent))
2352
class bhkListShape:
    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return center of gravity and area."""
        subshapes_mci = [subshape.get_mass_center_inertia(density = density,
                                                          solid = solid)
                         for subshape in self.sub_shapes]
        total_mass = 0
        total_center = (0, 0, 0)
        total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0))

        # first pass: total mass
        for mass, center, inertia in subshapes_mci:
            total_mass += mass
        if total_mass == 0:
            return 0, (0, 0, 0), ((0, 0, 0), (0, 0, 0), (0, 0, 0))

        # second pass: mass-weighted average center, summed inertia
        for mass, center, inertia in subshapes_mci:
            total_center = vecAdd(total_center,
                                  vecscalarMul(center, mass / total_mass))
            total_inertia = matAdd(total_inertia, inertia)
        return total_mass, total_center, total_inertia

    def add_shape(self, shape, front = False):
        """Add shape to list."""
        if shape in self.sub_shapes:
            # already present, nothing to do
            return
        old_count = self.num_sub_shapes
        # grow the shape list by one slot
        self.num_sub_shapes = old_count + 1
        self.sub_shapes.update_size()
        if front:
            # shift everything one slot to the right, insert at index 0
            for i in xrange(old_count, 0, -1):
                self.sub_shapes[i] = self.sub_shapes[i-1]
            self.sub_shapes[0] = shape
        else:
            self.sub_shapes[old_count] = shape
        # keep the parallel list of unknown ints in sync
        self.num_unknown_ints = old_count + 1
        self.unknown_ints.update_size()

    def remove_shape(self, shape):
        """Remove a shape from the shape list."""
        # keep every shape except the one being removed
        remaining = [s for s in self.sub_shapes if s != shape]
        self.num_sub_shapes = len(remaining)
        self.sub_shapes.update_size()
        for i, s in enumerate(remaining):
            self.sub_shapes[i] = s
        # keep the parallel list of unknown ints in sync
        self.num_unknown_ints = len(remaining)
        self.unknown_ints.update_size()
2407
class bhkMalleableConstraint:
    def apply_scale(self, scale):
        """Scale data."""
        # scale the pivots of both embedded constraint descriptions
        for pivot in (self.ragdoll.pivot_a, self.ragdoll.pivot_b,
                      self.limited_hinge.pivot_a,
                      self.limited_hinge.pivot_b):
            pivot.x *= scale
            pivot.y *= scale
            pivot.z *= scale

    def update_a_b(self, parent):
        """Update the B data from the A data."""
        transform = self.get_transform_a_b(parent)
        self.limited_hinge.update_a_b(transform)
        self.ragdoll.update_a_b(transform)
2430
2431 - class bhkMoppBvTreeShape:
2432 - def get_mass_center_inertia(self, density=1, solid=True):
2433 """Return mass, center of gravity, and inertia tensor.""" 2434 return self.get_shape_mass_center_inertia( 2435 density=density, solid=solid)
2436
2437 - def update_origin_scale(self):
2438 """Update scale and origin.""" 2439 minx = min(v.x for v in self.shape.data.vertices) 2440 miny = min(v.y for v in self.shape.data.vertices) 2441 minz = min(v.z for v in self.shape.data.vertices) 2442 maxx = max(v.x for v in self.shape.data.vertices) 2443 maxy = max(v.y for v in self.shape.data.vertices) 2444 maxz = max(v.z for v in self.shape.data.vertices) 2445 self.origin.x = minx - 0.1 2446 self.origin.y = miny - 0.1 2447 self.origin.z = minz - 0.1 2448 self.scale = (256*256*254) / (0.2+max([maxx-minx,maxy-miny,maxz-minz]))
2449
def update_mopp(self):
    """Update the MOPP data, scale, and origin, and welding info.

    @deprecated: use update_mopp_welding instead
    """
    # thin backward-compatible wrapper around the new name
    self.update_mopp_welding()
2456
def update_mopp_welding(self):
    """Update the MOPP data, scale, and origin, and welding info."""
    logger = logging.getLogger("pyffi.mopp")
    # check type of shape
    if not isinstance(self.shape, NifFormat.bhkPackedNiTriStripsShape):
        raise ValueError(
            "expected bhkPackedNiTriStripsShape on mopp"
            " but got %s instead" % self.shape.__class__.__name__)
    # first try with pyffi.utils.mopp (external Havok mopper executable)
    failed = False
    try:
        print(pyffi.utils.mopp.getMopperCredits())
    except (OSError, RuntimeError):
        # mopper executable not available on this system
        failed = True
    else:
        # find material indices per triangle
        material_per_vertex = []
        for subshape in self.shape.get_sub_shapes():
            material_per_vertex += (
                [subshape.material] * subshape.num_vertices)
        # each triangle inherits the material of its first vertex
        material_per_triangle = [
            material_per_vertex[hktri.triangle.v_1]
            for hktri in self.shape.data.triangles]
        # compute havok info
        try:
            origin, scale, mopp, welding_infos \
                = pyffi.utils.mopp.getMopperOriginScaleCodeWelding(
                    [vert.as_tuple() for vert in self.shape.data.vertices],
                    [(hktri.triangle.v_1,
                      hktri.triangle.v_2,
                      hktri.triangle.v_3)
                     for hktri in self.shape.data.triangles],
                    material_per_triangle)
        except (OSError, RuntimeError):
            failed = True
        else:
            # must use calculated scale and origin
            self.scale = scale
            self.origin.x = origin[0]
            self.origin.y = origin[1]
            self.origin.z = origin[2]
    # if havok's mopper failed, do a simple mopp
    if failed:
        # NOTE(review): logger.exception is used outside an except block
        # here, and the two concatenated message parts lack a separating
        # space ("...in-game!)."  +  "If you are...") — confirm intent
        logger.exception(
            "Havok mopp generator failed, falling back on simple mopp "
            "(but collisions may be flawed in-game!)."
            "If you are using the PyFFI that was shipped with Blender, "
            "and you are on Windows, then you may wish to install the "
            "full version of PyFFI from "
            "http://pyffi.sourceforge.net/ "
            "instead, which includes the (closed source) "
            "Havok mopp generator.")
        self.update_origin_scale()
        mopp = self._makeSimpleMopp()
        # no welding info
        welding_infos = []

    # delete mopp and replace with new data
    self.mopp_data_size = len(mopp)
    self.mopp_data.update_size()
    for i, b in enumerate(mopp):
        self.mopp_data[i] = b

    # update welding information
    for hktri, welding_info in izip(self.shape.data.triangles, welding_infos):
        hktri.welding_info = welding_info
2523
def _makeSimpleMopp(self):
    """Make a simple mopp."""
    mopp = [] # the mopp 'assembly' script
    self._q = 256*256 / self.scale # quantization factor

    # opcodes
    BOUNDX = 0x26
    BOUNDY = 0x27
    BOUNDZ = 0x28
    TESTX = 0x10
    TESTY = 0x11
    TESTZ = 0x12

    # add first crude bounding box checks
    # (quantized vertex coordinates, rounded up and down respectively)
    self._vertsceil = [ self._moppCeil(v) for v in self.shape.data.vertices ]
    self._vertsfloor = [ self._moppFloor(v) for v in self.shape.data.vertices ]
    minx = min([ v[0] for v in self._vertsfloor ])
    miny = min([ v[1] for v in self._vertsfloor ])
    minz = min([ v[2] for v in self._vertsfloor ])
    maxx = max([ v[0] for v in self._vertsceil ])
    maxy = max([ v[1] for v in self._vertsceil ])
    maxz = max([ v[2] for v in self._vertsceil ])
    # quantized coordinates must fit in a single unsigned byte
    if minx < 0 or miny < 0 or minz < 0: raise ValueError("cannot update mopp tree with invalid origin")
    if maxx > 255 or maxy > 255 or maxz > 255: raise ValueError("cannot update mopp tree with invalid scale")
    mopp.extend([BOUNDZ, minz, maxz])
    mopp.extend([BOUNDY, miny, maxy])
    mopp.extend([BOUNDX, minx, maxx])

    # add tree using subsequent X-Y-Z splits
    # (slow and no noticable difference from other simple tree so deactivated)
    #tris = range(len(self.shape.data.triangles))
    #tree = self.split_triangles(tris, [[minx,maxx],[miny,maxy],[minz,maxz]])
    #mopp += self.mopp_from_tree(tree)

    # add a trivial tree
    # this prevents the player of walking through the model
    # but arrows may still fly through
    # each triangle is emitted as a trivial always-true branch; compact
    # triangle opcodes run 0x30-0x4F, so after 0x20 triangles the offset
    # is bumped (opcode 0x09) and the compact opcode wraps back to 0x30
    numtriangles = len(self.shape.data.triangles)
    i = 0x30
    for t in xrange(numtriangles-1):
        mopp.extend([TESTZ, maxz, 0, 1, i])
        i += 1
        if i == 0x50:
            mopp.extend([0x09, 0x20]) # increment triangle offset
            i = 0x30
    mopp.extend([i])

    return mopp
2572
2573 - def _moppCeil(self, v):
2574 moppx = int((v.x + 0.1 - self.origin.x) / self._q + 0.99999999) 2575 moppy = int((v.y + 0.1 - self.origin.y) / self._q + 0.99999999) 2576 moppz = int((v.z + 0.1 - self.origin.z) / self._q + 0.99999999) 2577 return [moppx, moppy, moppz]
2578
2579 - def _moppFloor(self, v):
2580 moppx = int((v.x - 0.1 - self.origin.x) / self._q) 2581 moppy = int((v.y - 0.1 - self.origin.y) / self._q) 2582 moppz = int((v.z - 0.1 - self.origin.z) / self._q) 2583 return [moppx, moppy, moppz]
2584
2585 - def split_triangles(self, ts, bbox, dir=0):
2586 """Direction 0=X, 1=Y, 2=Z""" 2587 btest = [] # for bounding box tests 2588 test = [] # for branch command 2589 # check bounding box 2590 tris = [ t.triangle for t in self.shape.data.triangles ] 2591 tsverts = [ tris[t].v_1 for t in ts] + [ tris[t].v_2 for t in ts] + [ tris[t].v_3 for t in ts] 2592 minx = min([self._vertsfloor[v][0] for v in tsverts]) 2593 miny = min([self._vertsfloor[v][1] for v in tsverts]) 2594 minz = min([self._vertsfloor[v][2] for v in tsverts]) 2595 maxx = max([self._vertsceil[v][0] for v in tsverts]) 2596 maxy = max([self._vertsceil[v][1] for v in tsverts]) 2597 maxz = max([self._vertsceil[v][2] for v in tsverts]) 2598 # add bounding box checks if it's reduced in a direction 2599 if (maxx - minx < bbox[0][1] - bbox[0][0]): 2600 btest += [ 0x26, minx, maxx ] 2601 bbox[0][0] = minx 2602 bbox[0][1] = maxx 2603 if (maxy - miny < bbox[1][1] - bbox[1][0]): 2604 btest += [ 0x27, miny, maxy ] 2605 bbox[1][0] = miny 2606 bbox[1][1] = maxy 2607 if (maxz - minz < bbox[2][1] - bbox[2][0]): 2608 btest += [ 0x28, minz, maxz ] 2609 bbox[2][0] = minz 2610 bbox[2][1] = maxz 2611 # if only one triangle, no further split needed 2612 if len(ts) == 1: 2613 if ts[0] < 32: 2614 return [ btest, [ 0x30 + ts[0] ], [], [] ] 2615 elif ts[0] < 256: 2616 return [ btest, [ 0x50, ts[0] ], [], [] ] 2617 else: 2618 return [ btest, [ 0x51, ts[0] >> 8, ts[0] & 255 ], [], [] ] 2619 # sort triangles in required direction 2620 ts.sort(key = lambda t: max(self._vertsceil[tris[t].v_1][dir], self._vertsceil[tris[t].v_2][dir], self._vertsceil[tris[t].v_3][dir])) 2621 # split into two 2622 ts1 = ts[:len(ts)/2] 2623 ts2 = ts[len(ts)/2:] 2624 # get maximum coordinate of small group 2625 ts1verts = [ tris[t].v_1 for t in ts1] + [ tris[t].v_2 for t in ts1] + [ tris[t].v_3 for t in ts1] 2626 ts2verts = [ tris[t].v_1 for t in ts2] + [ tris[t].v_2 for t in ts2] + [ tris[t].v_3 for t in ts2] 2627 ts1max = max([self._vertsceil[v][dir] for v in ts1verts]) 2628 # get minimum coordinate of 
large group 2629 ts2min = min([self._vertsfloor[v][dir] for v in ts2verts]) 2630 # set up test 2631 test += [0x10+dir, ts1max, ts2min] 2632 # set up new bounding boxes for each subtree 2633 # make copy 2634 bbox1 = [[bbox[0][0],bbox[0][1]],[bbox[1][0],bbox[1][1]],[bbox[2][0],bbox[2][1]]] 2635 bbox2 = [[bbox[0][0],bbox[0][1]],[bbox[1][0],bbox[1][1]],[bbox[2][0],bbox[2][1]]] 2636 # update bound in test direction 2637 bbox1[dir][1] = ts1max 2638 bbox2[dir][0] = ts2min 2639 # return result 2640 nextdir = dir+1 2641 if nextdir == 3: nextdir = 0 2642 return [btest, test, self.split_triangles(ts1, bbox1, nextdir), self.split_triangles(ts2, bbox2, nextdir)]
2643
2644 - def mopp_from_tree(self, tree):
2645 if tree[1][0] in xrange(0x30, 0x52): 2646 return tree[0] + tree[1] 2647 mopp = tree[0] + tree[1] 2648 submopp1 = self.mopp_from_tree(tree[2]) 2649 submopp2 = self.mopp_from_tree(tree[3]) 2650 if len(submopp1) < 256: 2651 mopp += [ len(submopp1) ] 2652 mopp += submopp1 2653 mopp += submopp2 2654 else: 2655 jump = len(submopp2) 2656 if jump <= 255: 2657 mopp += [2, 0x05, jump] 2658 else: 2659 mopp += [3, 0x06, jump >> 8, jump & 255] 2660 mopp += submopp2 2661 mopp += submopp1 2662 return mopp
2663 2664 # ported and extended from NifVis/bhkMoppBvTreeShape.py
def parse_mopp(self, start = 0, depth = 0, toffset = 0, verbose = False):
    """The mopp data is printed to the debug channel
    while parsed. Returns list of indices into mopp data of the bytes
    processed and a list of triangle indices encountered.

    The verbose argument is ignored (and is deprecated).
    """
    # small helper that accumulates message fragments and flushes them
    # to the "pyffi.mopp" logger at debug or error level
    class Message:
        def __init__(self):
            self.logger = logging.getLogger("pyffi.mopp")
            self.msg = ""

        def append(self, *args):
            self.msg += " ".join(str(arg) for arg in args) + " "
            return self

        def debug(self):
            if self.msg:
                self.logger.debug(self.msg)
            self.msg = ""

        def error(self):
            self.logger.error(self.msg)
            self.msg = ""

    mopp = self.mopp_data # shortcut notation
    ids = [] # indices of bytes processed
    tris = [] # triangle indices
    i = start # current index
    ret = False # set to True if an opcode signals a triangle index
    # walk the opcode stream until a triangle (or a branch whose
    # subtrees have been fully parsed recursively) ends this chain
    while i < self.mopp_data_size and not ret:
        # get opcode and print it
        code = mopp[i]
        msg = Message()
        msg.append("%4i:"%i + " "*depth + '0x%02X ' % code)

        if code == 0x09:
            # increment triangle offset
            toffset += mopp[i+1]
            msg.append(mopp[i+1])
            msg.append('%i [ triangle offset += %i, offset is now %i ]'
                       % (mopp[i+1], mopp[i+1], toffset))
            ids.extend([i,i+1])
            i += 2

        elif code in [ 0x0A ]:
            # increment triangle offset (16 bit operand)
            toffset += mopp[i+1]*256 + mopp[i+2]
            msg.append(mopp[i+1],mopp[i+2])
            msg.append('[ triangle offset += %i, offset is now %i ]'
                       % (mopp[i+1]*256 + mopp[i+2], toffset))
            ids.extend([i,i+1,i+2])
            i += 3

        elif code in [ 0x0B ]:
            # unsure about first two arguments, but the 3rd and 4th set triangle offset
            toffset = 256*mopp[i+3] + mopp[i+4]
            msg.append(mopp[i+1],mopp[i+2],mopp[i+3],mopp[i+4])
            msg.append('[ triangle offset = %i ]' % toffset)
            ids.extend([i,i+1,i+2,i+3,i+4])
            i += 5

        elif code in xrange(0x30,0x50):
            # triangle compact (index encoded directly in the opcode)
            msg.append('[ triangle %i ]'%(code-0x30+toffset))
            ids.append(i)
            tris.append(code-0x30+toffset)
            i += 1
            ret = True

        elif code == 0x50:
            # triangle byte (index in a one-byte operand)
            msg.append(mopp[i+1])
            msg.append('[ triangle %i ]'%(mopp[i+1]+toffset))
            ids.extend([i,i+1])
            tris.append(mopp[i+1]+toffset)
            i += 2
            ret = True

        elif code in [ 0x51 ]:
            # triangle short (index in a two-byte operand)
            t = mopp[i+1]*256 + mopp[i+2] + toffset
            msg.append(mopp[i+1],mopp[i+2])
            msg.append('[ triangle %i ]' % t)
            ids.extend([i,i+1,i+2])
            tris.append(t)
            i += 3
            ret = True

        elif code in [ 0x53 ]:
            # triangle short?
            t = mopp[i+3]*256 + mopp[i+4] + toffset
            msg.append(mopp[i+1],mopp[i+2],mopp[i+3],mopp[i+4])
            msg.append('[ triangle %i ]' % t)
            ids.extend([i,i+1,i+2,i+3,i+4])
            tris.append(t)
            i += 5
            ret = True

        elif code in [ 0x05 ]:
            # byte jump
            msg.append('[ jump -> %i: ]'%(i+2+mopp[i+1]))
            ids.extend([i,i+1])
            i += 2+mopp[i+1]

        elif code in [ 0x06 ]:
            # short jump
            jump = mopp[i+1]*256 + mopp[i+2]
            msg.append('[ jump -> %i: ]'%(i+3+jump))
            ids.extend([i,i+1,i+2])
            i += 3+jump

        elif code in [0x10,0x11,0x12, 0x13,0x14,0x15, 0x16,0x17,0x18, 0x19, 0x1A, 0x1B, 0x1C]:
            # compact if-then-else with two arguments:
            # both subtrees are parsed recursively
            msg.append(mopp[i+1], mopp[i+2])
            if code == 0x10:
                msg.append('[ branch X')
            elif code == 0x11:
                msg.append('[ branch Y')
            elif code == 0x12:
                msg.append('[ branch Z')
            else:
                msg.append('[ branch ?')
            msg.append('-> %i: %i: ]'%(i+4,i+4+mopp[i+3]))
            msg.debug()
            msg.append(" " + " "*depth + 'if:')
            msg.debug()
            idssub1, trissub1 = self.parse_mopp(start = i+4, depth = depth+1, toffset = toffset, verbose = verbose)
            msg.append(" " + " "*depth + 'else:')
            msg.debug()
            idssub2, trissub2 = self.parse_mopp(start = i+4+mopp[i+3], depth = depth+1, toffset = toffset, verbose = verbose)
            ids.extend([i,i+1,i+2,i+3])
            ids.extend(idssub1)
            ids.extend(idssub2)
            tris.extend(trissub1)
            tris.extend(trissub2)
            ret = True

        elif code in [0x20,0x21,0x22]:
            # compact if-then-else with one argument
            msg.append(mopp[i+1], '[ branch ? -> %i: %i: ]'%(i+3,i+3+mopp[i+2])).debug()
            msg.append(" " + " "*depth + 'if:').debug()
            idssub1, trissub1 = self.parse_mopp(start = i+3, depth = depth+1, toffset = toffset, verbose = verbose)
            msg.append(" " + " "*depth + 'else:').debug()
            idssub2, trissub2 = self.parse_mopp(start = i+3+mopp[i+2], depth = depth+1, toffset = toffset, verbose = verbose)
            ids.extend([i,i+1,i+2])
            ids.extend(idssub1)
            ids.extend(idssub2)
            tris.extend(trissub1)
            tris.extend(trissub2)
            ret = True

        elif code in [0x23,0x24,0x25]: # short if x <= a then 1; if x > b then 2;
            jump1 = mopp[i+3] * 256 + mopp[i+4]
            jump2 = mopp[i+5] * 256 + mopp[i+6]
            msg.append(mopp[i+1], mopp[i+2], '[ branch ? -> %i: %i: ]'%(i+7+jump1,i+7+jump2)).debug()
            msg.append(" " + " "*depth + 'if:').debug()
            idssub1, trissub1 = self.parse_mopp(start = i+7+jump1, depth = depth+1, toffset = toffset, verbose = verbose)
            msg.append(" " + " "*depth + 'else:').debug()
            idssub2, trissub2 = self.parse_mopp(start = i+7+jump2, depth = depth+1, toffset = toffset, verbose = verbose)
            ids.extend([i,i+1,i+2,i+3,i+4,i+5,i+6])
            ids.extend(idssub1)
            ids.extend(idssub2)
            tris.extend(trissub1)
            tris.extend(trissub2)
            ret = True
        elif code in [0x26,0x27,0x28]:
            # bounding box check on a single axis
            msg.append(mopp[i+1], mopp[i+2])
            if code == 0x26:
                msg.append('[ bound X ]')
            elif code == 0x27:
                msg.append('[ bound Y ]')
            elif code == 0x28:
                msg.append('[ bound Z ]')
            ids.extend([i,i+1,i+2])
            i += 3
        elif code in [0x01, 0x02, 0x03, 0x04]:
            # three-operand opcode, meaning not fully known
            msg.append(mopp[i+1], mopp[i+2], mopp[i+3], '[ bound XYZ? ]')
            ids.extend([i,i+1,i+2,i+3])
            i += 4
        else:
            # unknown opcode: log the following bytes to help analysis,
            # then bail out
            msg.append("unknown mopp code 0x%02X"%code).error()
            msg.append("following bytes are").debug()
            extrabytes = [mopp[j] for j in xrange(i+1,min(self.mopp_data_size,i+10))]
            extraindex = [j for j in xrange(i+1,min(self.mopp_data_size,i+10))]
            msg.append(extrabytes).debug()
            for b, j in zip(extrabytes, extraindex):
                if j+b+1 < self.mopp_data_size:
                    msg.append("opcode after jump %i is 0x%02X"%(b,mopp[j+b+1]), [mopp[k] for k in xrange(j+b+2,min(self.mopp_data_size,j+b+11))]).debug()
            raise ValueError("unknown mopp opcode 0x%02X"%code)

        msg.debug()

    return ids, tris
class bhkMultiSphereShape:
    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center of gravity, and inertia tensor of the
        collection of spheres.

        :param density: density of the material.
        :param solid: whether the spheres are solid or hollow shells.
        :return: (mass, center, inertia) tuple.
        """
        # mass, center, and inertia of each individual sphere
        subshapes_mci = [
            (mass, center, inertia)
            for (mass, inertia), center in
            izip( ( pyffi.utils.inertia.getMassInertiaSphere(radius = sphere.radius,
                                                             density = density, solid = solid)
                    for sphere in self.spheres ),
                  ( sphere.center.as_tuple() for sphere in self.spheres ) ) ]
        total_mass = 0
        total_center = (0, 0, 0)
        total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0))
        # accumulate the mass-weighted sum of the centers and divide by the
        # total mass at the end; the previous running update
        # (center * mass / accumulated_mass) overweighted earlier spheres
        # and raised ZeroDivisionError for a zero-mass first sphere
        weighted_center = (0, 0, 0)
        for mass, center, inertia in subshapes_mci:
            total_mass += mass
            weighted_center = vecAdd(weighted_center,
                                     vecscalarMul(center, mass))
            # NOTE(review): inertia contributions are summed without the
            # parallel axis theorem, matching the rest of this module
            total_inertia = matAdd(total_inertia, inertia)
        if total_mass != 0:
            total_center = vecscalarMul(weighted_center, 1.0 / total_mass)
        return total_mass, total_center, total_inertia
2879
class bhkNiTriStripsShape:
    # NOTE(review): the def line of this first method was garbled in the
    # extraction; reconstructed with the standard pyffi API name.
    def get_interchangeable_packed_shape(self):
        """Returns a bhkPackedNiTriStripsShape block that is geometrically
        interchangeable.
        """
        # get all vertices, triangles, and calculate normals
        vertices = []
        normals = []
        triangles = []
        for strip in self.strips_data:
            # offset triangle indices by the vertices added so far
            triangles.extend(
                (tri1 + len(vertices),
                 tri2 + len(vertices),
                 tri3 + len(vertices))
                for tri1, tri2, tri3 in strip.get_triangles())
            vertices.extend(
                # scaling factor 1/7 applied in add_shape later
                vert.as_tuple() for vert in strip.vertices)
            # face normal from the triangle edge cross product
            normals.extend(
                (strip.vertices[tri2] - strip.vertices[tri1]).crossproduct(
                    strip.vertices[tri3] - strip.vertices[tri1])
                .normalized(ignore_error=True)
                .as_tuple()
                for tri1, tri2, tri3 in strip.get_triangles())
        # create packed shape and add geometry
        packed = NifFormat.bhkPackedNiTriStripsShape()
        packed.add_shape(
            triangles=triangles,
            normals=normals,
            vertices=vertices,
            # default layer 1 (static collision)
            layer=self.data_layers[0].layer if self.data_layers else 1,
            material=self.material)
        # set unknowns
        packed.unknown_floats[2] = 0.1
        packed.unknown_floats[4] = 1.0
        packed.unknown_floats[5] = 1.0
        packed.unknown_floats[6] = 1.0
        packed.unknown_floats[8] = 0.1
        packed.scale = 1.0
        packed.unknown_floats_2[0] = 1.0
        packed.unknown_floats_2[1] = 1.0
        # return result
        return packed

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor.

        :param density: density of the material.
        :param solid: whether the strips enclose a solid volume.
        :return: (mass, center, inertia) tuple.
        """
        # first find mass, center, and inertia of all strips
        subshapes_mci = []
        for data in self.strips_data:
            subshapes_mci.append(
                pyffi.utils.inertia.get_mass_center_inertia_polyhedron(
                    [ vert.as_tuple() for vert in data.vertices ],
                    [ triangle for triangle in data.get_triangles() ],
                    density = density, solid = solid))

        # now calculate total mass, center, and inertia
        total_mass = 0
        total_center = (0, 0, 0)
        total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0))
        # mass-weighted sum of the centers, divided by the total mass at
        # the end; the previous running update overweighted earlier strips
        # and could divide by zero
        weighted_center = (0, 0, 0)
        for mass, center, inertia in subshapes_mci:
            total_mass += mass
            weighted_center = vecAdd(weighted_center,
                                     vecscalarMul(center, mass))
            total_inertia = matAdd(total_inertia, inertia)
        if total_mass != 0:
            total_center = vecscalarMul(weighted_center, 1.0 / total_mass)
        return total_mass, total_center, total_inertia
2946
class bhkPackedNiTriStripsShape:
    # Havok packed triangle-strip collision shape (data lives in an
    # hkPackedNiTriStripsData block referenced by self.data).

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor.

        Delegates to the generic polyhedron routine on the packed
        vertex and triangle data.
        """
        return pyffi.utils.inertia.get_mass_center_inertia_polyhedron(
            [ vert.as_tuple() for vert in self.data.vertices ],
            [ ( hktriangle.triangle.v_1,
                hktriangle.triangle.v_2,
                hktriangle.triangle.v_3 )
              for hktriangle in self.data.triangles ],
            density = density, solid = solid)

    def get_sub_shapes(self):
        """Return sub shapes (works for both Oblivion and Fallout 3)."""
        # sub shapes are stored either on the data block or on the shape
        # itself, depending on the version; prefer the data block
        if self.data and self.data.sub_shapes:
            return self.data.sub_shapes
        else:
            return self.sub_shapes

    def add_shape(self, triangles, normals, vertices, layer = 0, material = 0):
        """Pack the given geometry as an extra sub shape.

        :param triangles: list of (i1, i2, i3) vertex index triples,
            relative to the given vertices.
        :param normals: one (x, y, z) normal per triangle.
        :param vertices: list of (x, y, z) vertex coordinates
            (divided by 7 on storage, per the havok convention used here).
        :param layer: collision layer of the new sub shape.
        :param material: material of the new sub shape.
        """
        # add the shape data
        if not self.data:
            self.data = NifFormat.hkPackedNiTriStripsData()
        data = self.data
        # increase number of shapes
        num_shapes = self.num_sub_shapes
        self.num_sub_shapes = num_shapes + 1
        self.sub_shapes.update_size()
        data.num_sub_shapes = num_shapes + 1
        data.sub_shapes.update_size()
        # add the shape (mirrored on both shape and data for compatibility)
        self.sub_shapes[num_shapes].layer = layer
        self.sub_shapes[num_shapes].num_vertices = len(vertices)
        self.sub_shapes[num_shapes].material = material
        data.sub_shapes[num_shapes].layer = layer
        data.sub_shapes[num_shapes].num_vertices = len(vertices)
        data.sub_shapes[num_shapes].material = material
        # remember where this sub shape's data starts
        firsttriangle = data.num_triangles
        firstvertex = data.num_vertices
        data.num_triangles += len(triangles)
        data.triangles.update_size()
        for tdata, t, n in zip(data.triangles[firsttriangle:], triangles, normals):
            # triangle indices are rebased onto the global vertex array
            tdata.triangle.v_1 = t[0] + firstvertex
            tdata.triangle.v_2 = t[1] + firstvertex
            tdata.triangle.v_3 = t[2] + firstvertex
            tdata.normal.x = n[0]
            tdata.normal.y = n[1]
            tdata.normal.z = n[2]
        data.num_vertices += len(vertices)
        data.vertices.update_size()
        for vdata, v in zip(data.vertices[firstvertex:], vertices):
            # stored scaled down by a factor 7
            vdata.x = v[0] / 7.0
            vdata.y = v[1] / 7.0
            vdata.z = v[2] / 7.0

    def get_vertex_hash_generator(
        self,
        vertexprecision=3, subshape_index=None):
        """Generator which produces a tuple of integers for each
        vertex to ease detection of duplicate/close enough to remove
        vertices. The precision parameter denote number of
        significant digits behind the comma.

        For vertexprecision, 3 seems usually enough (maybe we'll
        have to increase this at some point).

        >>> shape = NifFormat.bhkPackedNiTriStripsShape()
        >>> data = NifFormat.hkPackedNiTriStripsData()
        >>> shape.data = data
        >>> shape.num_sub_shapes = 2
        >>> shape.sub_shapes.update_size()
        >>> data.num_vertices = 3
        >>> shape.sub_shapes[0].num_vertices = 2
        >>> shape.sub_shapes[1].num_vertices = 1
        >>> data.vertices.update_size()
        >>> data.vertices[0].x = 0.0
        >>> data.vertices[0].y = 0.1
        >>> data.vertices[0].z = 0.2
        >>> data.vertices[1].x = 1.0
        >>> data.vertices[1].y = 1.1
        >>> data.vertices[1].z = 1.2
        >>> data.vertices[2].x = 2.0
        >>> data.vertices[2].y = 2.1
        >>> data.vertices[2].z = 2.2
        >>> list(shape.get_vertex_hash_generator())
        [(0, (0, 100, 200)), (0, (1000, 1100, 1200)), (1, (2000, 2100, 2200))]
        >>> list(shape.get_vertex_hash_generator(subshape_index=0))
        [(0, 100, 200), (1000, 1100, 1200)]
        >>> list(shape.get_vertex_hash_generator(subshape_index=1))
        [(2000, 2100, 2200)]

        :param vertexprecision: Precision to be used for vertices.
        :type vertexprecision: int
        :return: A generator yielding a hash value for each vertex.
        """
        # scale each coordinate so the wanted digits become integral
        vertexfactor = 10 ** vertexprecision
        if subshape_index is None:
            # pair every vertex with the index of the sub shape it belongs to
            for matid, vert in izip(chain(*[repeat(i, sub_shape.num_vertices)
                                            for i, sub_shape
                                            in enumerate(self.get_sub_shapes())]),
                                    self.data.vertices):
                yield (matid, tuple(float_to_int(value * vertexfactor)
                                    for value in vert.as_list()))
        else:
            # sum vertex counts of the preceding sub shapes to find the
            # first vertex of the requested sub shape
            first_vertex = 0
            for i, subshape in izip(xrange(subshape_index),
                                    self.get_sub_shapes()):
                first_vertex += subshape.num_vertices
            for vert_index in xrange(
                first_vertex,
                first_vertex
                + self.get_sub_shapes()[subshape_index].num_vertices):
                yield tuple(float_to_int(value * vertexfactor)
                            for value
                            in self.data.vertices[vert_index].as_list())

    # NOTE(review): def line reconstructed (garbled in extraction).
    def get_triangle_hash_generator(self):
        """Generator which produces a tuple of integers, or None
        in degenerate case, for each triangle to ease detection of
        duplicate triangles.

        >>> shape = NifFormat.bhkPackedNiTriStripsShape()
        >>> data = NifFormat.hkPackedNiTriStripsData()
        >>> shape.data = data
        >>> data.num_triangles = 6
        >>> data.triangles.update_size()
        >>> data.triangles[0].triangle.v_1 = 0
        >>> data.triangles[0].triangle.v_2 = 1
        >>> data.triangles[0].triangle.v_3 = 2
        >>> data.triangles[1].triangle.v_1 = 2
        >>> data.triangles[1].triangle.v_2 = 1
        >>> data.triangles[1].triangle.v_3 = 3
        >>> data.triangles[2].triangle.v_1 = 3
        >>> data.triangles[2].triangle.v_2 = 2
        >>> data.triangles[2].triangle.v_3 = 1
        >>> data.triangles[3].triangle.v_1 = 3
        >>> data.triangles[3].triangle.v_2 = 1
        >>> data.triangles[3].triangle.v_3 = 2
        >>> data.triangles[4].triangle.v_1 = 0
        >>> data.triangles[4].triangle.v_2 = 0
        >>> data.triangles[4].triangle.v_3 = 3
        >>> data.triangles[5].triangle.v_1 = 1
        >>> data.triangles[5].triangle.v_2 = 3
        >>> data.triangles[5].triangle.v_3 = 4
        >>> list(shape.get_triangle_hash_generator())
        [(0, 1, 2), (1, 3, 2), (1, 3, 2), (1, 2, 3), None, (1, 3, 4)]

        :return: A generator yielding a hash value for each triangle.
        """
        # rotate each triangle so its smallest index comes first; this
        # keeps winding order while making rotations compare equal
        for tri in self.data.triangles:
            v_1, v_2, v_3 = tri.triangle.v_1, tri.triangle.v_2, tri.triangle.v_3
            if v_1 == v_2 or v_2 == v_3 or v_3 == v_1:
                # degenerate
                yield None
            elif v_1 < v_2 and v_1 < v_3:
                # v_1 smallest
                yield v_1, v_2, v_3
            elif v_2 < v_1 and v_2 < v_3:
                # v_2 smallest
                yield v_2, v_3, v_1
            else:
                # v_3 smallest
                yield v_3, v_1, v_2
3110
class bhkRagdollConstraint:
    def apply_scale(self, scale):
        """Scale the ragdoll pivot points."""
        for pivot in (self.ragdoll.pivot_a, self.ragdoll.pivot_b):
            pivot.x *= scale
            pivot.y *= scale
            pivot.z *= scale

    def update_a_b(self, parent):
        """Update the B data from the A data."""
        transform = self.get_transform_a_b(parent)
        self.ragdoll.update_a_b(transform)
3125
class bhkRefObject:
    def get_shape_mass_center_inertia(self, density=1, solid=True):
        """Return mass, center of gravity, and inertia tensor of
        this object's shape, if self.shape is not None.

        If self.shape is None, then returns zeros for everything.
        """
        if self.shape:
            return self.shape.get_mass_center_inertia(
                density=density, solid=solid)
        # no shape attached: zero mass, centered at origin, zero inertia
        return 0, (0, 0, 0), ((0, 0, 0), (0, 0, 0), (0, 0, 0))
3141
class bhkRigidBody:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # translation and center of gravity scale linearly
        for vec in (self.translation, self.center):
            vec.x *= scale
            vec.y *= scale
            vec.z *= scale
        # the inertia tensor scales with the square of the factor
        squared = scale ** 2
        for row in (1, 2, 3):
            for col in (1, 2, 3, 4):
                name = "m_%i%i" % (row, col)
                setattr(self.inertia, name,
                        getattr(self.inertia, name) * squared)

    def update_mass_center_inertia(self, density=1, solid=True, mass=None):
        """Look at all the objects under this rigid body and update the mass,
        center of gravity, and inertia tensor accordingly. If the C{mass} parameter
        is given then the C{density} argument is ignored."""
        if mass is not None:
            density = 1

        calc_mass, center, inertia = self.get_shape_mass_center_inertia(
            density=density, solid=solid)

        self.mass = calc_mass
        self.center.x, self.center.y, self.center.z = center
        # copy the 3x3 tensor; the fourth column is always zero
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                setattr(self.inertia, "m_%i%i" % (row, col),
                        inertia[row - 1][col - 1])
            setattr(self.inertia, "m_%i4" % row, 0)

        if mass is not None:
            # rescale so the inertia matches the requested mass
            mass_correction = mass / calc_mass if calc_mass != 0 else 1
            self.mass = mass
            for row in (1, 2, 3):
                for col in (1, 2, 3, 4):
                    name = "m_%i%i" % (row, col)
                    setattr(self.inertia, name,
                            getattr(self.inertia, name) * mass_correction)
3209
class bhkSphereShape:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # only the radius defines the sphere's size
        self.radius = self.radius * scale

    def get_mass_center_inertia(self, density = 1, solid = True):
        """Return mass, center, and inertia tensor."""
        # a sphere's center of gravity is its own center (the origin
        # in local coordinates)
        mass, inertia = pyffi.utils.inertia.getMassInertiaSphere(
            self.radius, density = density, solid = solid)
        return mass, (0, 0, 0), inertia
3223
class bhkTransformShape:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # only the translation column of the transform scales
        for name in ("m_14", "m_24", "m_34"):
            setattr(self.transform, name,
                    getattr(self.transform, name) * scale)

    def get_mass_center_inertia(self, density=1, solid=True):
        """Return mass, center, and inertia tensor."""
        # mass, center, and inertia of the wrapped shape
        mass, center, inertia = self.get_shape_mass_center_inertia(
            density=density, solid=solid)
        # rotation part and translation vector of this transform
        rot = self.transform.get_matrix_33().as_tuple()
        rot_t = matTransposed(rot)
        offset = (self.transform.m_14, self.transform.m_24, self.transform.m_34)
        # move the center and rotate the inertia tensor into our frame
        center = vecAdd(matvecMul(rot, center), offset)
        inertia = matMul(matMul(rot_t, inertia), rot)
        return mass, center, inertia
3247
class BSBound:
    def apply_scale(self, scale):
        """Scale the bounding box center and dimensions."""
        for vec in (self.center, self.dimensions):
            vec.x *= scale
            vec.y *= scale
            vec.z *= scale
3257
class BSDismemberSkinInstance:
    def get_dismember_partitions(self):
        """Return triangles and body part indices."""
        triangles = []
        trianglepartmap = []
        # walk body parts and skin partition blocks in lockstep
        for bodypart, partblock in zip(
                self.partitions, self.skin_partition.skin_partition_blocks):
            part_triangles = list(partblock.get_mapped_triangles())
            triangles.extend(part_triangles)
            # one body part index per triangle of this partition
            trianglepartmap.extend(
                [bodypart.body_part] * len(part_triangles))
        return triangles, trianglepartmap
3269 3378
class hkPackedNiTriStripsData:
    def apply_scale(self, scale):
        """Apply scale factor on vertex data (skipped for scale ~ 1)."""
        # only rescale when the factor differs measurably from unity
        if abs(scale - 1.0) >= NifFormat.EPSILON:
            for vertex in self.vertices:
                vertex.x *= scale
                vertex.y *= scale
                vertex.z *= scale
3388
class InertiaMatrix:
    # A 3x4 matrix whose upper-left 3x3 part holds the inertia tensor;
    # the fourth column (m_14, m_24, m_34) is padding kept at zero.

    def as_list(self):
        """Return matrix as 3x3 list."""
        return [[getattr(self, "m_%i%i" % (i, j)) for j in (1, 2, 3)]
                for i in (1, 2, 3)]

    def as_tuple(self):
        """Return matrix as 3x3 tuple."""
        return tuple(tuple(row) for row in self.as_list())

    def __str__(self):
        return "".join("[ %6.3f %6.3f %6.3f ]\n" % tuple(row)
                       for row in self.as_list())

    def set_identity(self):
        """Set to identity matrix (padding column zeroed as well)."""
        for i in (1, 2, 3):
            for j in (1, 2, 3, 4):
                setattr(self, "m_%i%i" % (i, j), 1.0 if i == j else 0.0)

    def is_identity(self):
        """Return ``True`` if the 3x3 part is close to identity."""
        eps = NifFormat.EPSILON
        for i in (1, 2, 3):
            for j in (1, 2, 3):
                target = 1.0 if i == j else 0.0
                if abs(getattr(self, "m_%i%i" % (i, j)) - target) > eps:
                    return False
        return True

    def get_copy(self):
        """Return a copy of the matrix."""
        mat = NifFormat.InertiaMatrix()
        for i in (1, 2, 3):
            for j in (1, 2, 3, 4):
                name = "m_%i%i" % (i, j)
                setattr(mat, name, getattr(self, name))
        return mat

    def __eq__(self, mat):
        if not isinstance(mat, NifFormat.InertiaMatrix):
            raise TypeError(
                "do not know how to compare InertiaMatrix and %s"%mat.__class__)
        eps = NifFormat.EPSILON
        # only the 3x3 tensor part takes part in the comparison
        for i in (1, 2, 3):
            for j in (1, 2, 3):
                name = "m_%i%i" % (i, j)
                if abs(getattr(self, name) - getattr(mat, name)) > eps:
                    return False
        return True

    def __ne__(self, mat):
        return not self.__eq__(mat)
3480
class LimitedHingeDescriptor:
    def update_a_b(self, transform):
        """Update B pivot and axes from A using the given transform.

        :param transform: 4x4 transform taking A coordinates to B coordinates.
        """
        # pivot point: stored scaled by 1/7, hence multiply by 7 before and
        # divide by 7 after applying the full transform
        # NOTE(review): the factor-7 convention is inferred from the matching
        # * 7 / 7.0 pair and the /7.0 packing elsewhere in this file -- confirm.
        pivot_b = ((7 * self.pivot_a.get_vector_3()) * transform) / 7.0
        self.pivot_b.x = pivot_b.x
        self.pivot_b.y = pivot_b.y
        self.pivot_b.z = pivot_b.z
        # axes use the rotation part only (no translation)
        transform = transform.get_matrix_33()
        axle_b = self.axle_a.get_vector_3() * transform
        perp_2_axle_in_b_2 = self.perp_2_axle_in_a_2.get_vector_3() * transform
        self.axle_b.x = axle_b.x
        self.axle_b.y = axle_b.y
        self.axle_b.z = axle_b.z
        self.perp_2_axle_in_b_2.x = perp_2_axle_in_b_2.x
        self.perp_2_axle_in_b_2.y = perp_2_axle_in_b_2.y
        self.perp_2_axle_in_b_2.z = perp_2_axle_in_b_2.z
3499
class Matrix44:
    def as_list(self):
        """Return matrix as 4x4 list (list of rows)."""
        return [[getattr(self, "m_%i%i" % (i, j)) for j in (1, 2, 3, 4)]
                for i in (1, 2, 3, 4)]
3509
3510 - def as_tuple(self):
3511 """Return matrix as 4x4 tuple.""" 3512 return ( 3513 (self.m_11, self.m_12, self.m_13, self.m_14), 3514 (self.m_21, self.m_22, self.m_23, self.m_24), 3515 (self.m_31, self.m_32, self.m_33, self.m_34), 3516 (self.m_41, self.m_42, self.m_43, self.m_44) 3517 )
3518
3519 - def set_rows(self, row0, row1, row2, row3):
3520 """Set matrix from rows.""" 3521 self.m_11, self.m_12, self.m_13, self.m_14 = row0 3522 self.m_21, self.m_22, self.m_23, self.m_24 = row1 3523 self.m_31, self.m_32, self.m_33, self.m_34 = row2 3524 self.m_41, self.m_42, self.m_43, self.m_44 = row3
3525
3526 - def __str__(self):
3527 return( 3528 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3529 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3530 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3531 "[ %6.3f %6.3f %6.3f %6.3f ]\n" 3532 % (self.m_11, self.m_12, self.m_13, self.m_14, 3533 self.m_21, self.m_22, self.m_23, self.m_24, 3534 self.m_31, self.m_32, self.m_33, self.m_34, 3535 self.m_41, self.m_42, self.m_43, self.m_44))
3536
3537 - def set_identity(self):
3538 """Set to identity matrix.""" 3539 self.m_11 = 1.0 3540 self.m_12 = 0.0 3541 self.m_13 = 0.0 3542 self.m_14 = 0.0 3543 self.m_21 = 0.0 3544 self.m_22 = 1.0 3545 self.m_23 = 0.0 3546 self.m_24 = 0.0 3547 self.m_31 = 0.0 3548 self.m_32 = 0.0 3549 self.m_33 = 1.0 3550 self.m_34 = 0.0 3551 self.m_41 = 0.0 3552 self.m_42 = 0.0 3553 self.m_43 = 0.0 3554 self.m_44 = 1.0
3555
3556 - def is_identity(self):
3557 """Return ``True`` if the matrix is close to identity.""" 3558 if (abs(self.m_11 - 1.0) > NifFormat.EPSILON 3559 or abs(self.m_12) > NifFormat.EPSILON 3560 or abs(self.m_13) > NifFormat.EPSILON 3561 or abs(self.m_14) > NifFormat.EPSILON 3562 or abs(self.m_21) > NifFormat.EPSILON 3563 or abs(self.m_22 - 1.0) > NifFormat.EPSILON 3564 or abs(self.m_23) > NifFormat.EPSILON 3565 or abs(self.m_24) > NifFormat.EPSILON 3566 or abs(self.m_31) > NifFormat.EPSILON 3567 or abs(self.m_32) > NifFormat.EPSILON 3568 or abs(self.m_33 - 1.0) > NifFormat.EPSILON 3569 or abs(self.m_34) > NifFormat.EPSILON 3570 or abs(self.m_41) > NifFormat.EPSILON 3571 or abs(self.m_42) > NifFormat.EPSILON 3572 or abs(self.m_43) > NifFormat.EPSILON 3573 or abs(self.m_44 - 1.0) > NifFormat.EPSILON): 3574 return False 3575 else: 3576 return True
3577
3578 - def get_copy(self):
3579 """Create a copy of the matrix.""" 3580 mat = NifFormat.Matrix44() 3581 mat.m_11 = self.m_11 3582 mat.m_12 = self.m_12 3583 mat.m_13 = self.m_13 3584 mat.m_14 = self.m_14 3585 mat.m_21 = self.m_21 3586 mat.m_22 = self.m_22 3587 mat.m_23 = self.m_23 3588 mat.m_24 = self.m_24 3589 mat.m_31 = self.m_31 3590 mat.m_32 = self.m_32 3591 mat.m_33 = self.m_33 3592 mat.m_34 = self.m_34 3593 mat.m_41 = self.m_41 3594 mat.m_42 = self.m_42 3595 mat.m_43 = self.m_43 3596 mat.m_44 = self.m_44 3597 return mat
3598
3599 - def get_matrix_33(self):
3600 """Returns upper left 3x3 part.""" 3601 m = NifFormat.Matrix33() 3602 m.m_11 = self.m_11 3603 m.m_12 = self.m_12 3604 m.m_13 = self.m_13 3605 m.m_21 = self.m_21 3606 m.m_22 = self.m_22 3607 m.m_23 = self.m_23 3608 m.m_31 = self.m_31 3609 m.m_32 = self.m_32 3610 m.m_33 = self.m_33 3611 return m
3612
3613 - def set_matrix_33(self, m):
3614 """Sets upper left 3x3 part.""" 3615 if not isinstance(m, NifFormat.Matrix33): 3616 raise TypeError('argument must be Matrix33') 3617 self.m_11 = m.m_11 3618 self.m_12 = m.m_12 3619 self.m_13 = m.m_13 3620 self.m_21 = m.m_21 3621 self.m_22 = m.m_22 3622 self.m_23 = m.m_23 3623 self.m_31 = m.m_31 3624 self.m_32 = m.m_32 3625 self.m_33 = m.m_33
3626
3627 - def get_translation(self):
3628 """Returns lower left 1x3 part.""" 3629 t = NifFormat.Vector3() 3630 t.x = self.m_41 3631 t.y = self.m_42 3632 t.z = self.m_43 3633 return t
3634
3635 - def set_translation(self, translation):
3636 """Returns lower left 1x3 part.""" 3637 if not isinstance(translation, NifFormat.Vector3): 3638 raise TypeError('argument must be Vector3') 3639 self.m_41 = translation.x 3640 self.m_42 = translation.y 3641 self.m_43 = translation.z
3642
    # NOTE(review): def line reconstructed (garbled in extraction).
    def is_scale_rotation_translation(self):
        """Return ``True`` if the matrix is a scale-rotation plus
        translation: upper 3x3 part a scaled rotation, fourth column
        (0, 0, 0, 1) within tolerance.
        """
        if not self.get_matrix_33().is_scale_rotation(): return False
        if abs(self.m_14) > NifFormat.EPSILON: return False
        if abs(self.m_24) > NifFormat.EPSILON: return False
        if abs(self.m_34) > NifFormat.EPSILON: return False
        if abs(self.m_44 - 1.0) > NifFormat.EPSILON: return False
        return True
3650
    # NOTE(review): def line reconstructed (garbled in extraction).
    def get_scale_rotation_translation(self):
        """Decompose the matrix into (scale, rotation matrix, translation
        vector); assumes the matrix is scale * rotation + translation.
        """
        rotscl = self.get_matrix_33()
        scale = rotscl.get_scale()
        # dividing out the scale leaves a pure rotation
        rot = rotscl / scale
        trans = self.get_translation()
        return (scale, rot, trans)
3657
3658 - def get_scale_quat_translation(self):
3659 rotscl = self.get_matrix_33() 3660 scale, quat = rotscl.get_scale_quat() 3661 trans = self.get_translation() 3662 return (scale, quat, trans)
3663
    def set_scale_rotation_translation(self, scale, rotation, translation):
        """Compose the matrix from scale, rotation, and translation.

        :param scale: uniform scale factor (number).
        :param rotation: rotation part (Matrix33).
        :param translation: translation part (Vector3).
        :raise TypeError: if an argument has the wrong type.
        """
        # ``long`` in the check shows this file targets Python 2
        if not isinstance(scale, (float, int, long)):
            raise TypeError('scale must be float')
        if not isinstance(rotation, NifFormat.Matrix33):
            raise TypeError('rotation must be Matrix33')
        if not isinstance(translation, NifFormat.Vector3):
            raise TypeError('translation must be Vector3')

        if not rotation.is_rotation():
            # warn but proceed; log how far R * R^T is from identity
            logger = logging.getLogger("pyffi.nif.matrix")
            mat = rotation * rotation.get_transpose()
            idmat = NifFormat.Matrix33()
            idmat.set_identity()
            error = (mat - idmat).sup_norm()
            logger.warning("improper rotation matrix (error is %f)" % error)
            logger.debug(" matrix =")
            for line in str(rotation).split("\n"):
                logger.debug(" %s" % line)
            logger.debug(" its determinant = %f" % rotation.get_determinant())
            logger.debug(" matrix * matrix^T =")
            for line in str(mat).split("\n"):
                logger.debug(" %s" % line)

        # fourth column is always (0, 0, 0, 1)
        self.m_14 = 0.0
        self.m_24 = 0.0
        self.m_34 = 0.0
        self.m_44 = 1.0

        self.set_matrix_33(rotation * scale)
        self.set_translation(translation)
3694
    def get_inverse(self, fast=True):
        """Calculates inverse (fast assumes is_scale_rotation_translation is True)."""
        def adjoint(m, ii, jj):
            # minor of m: copy with row ii and column jj removed
            result = []
            for i, row in enumerate(m):
                if i == ii: continue
                result.append([])
                for j, x in enumerate(row):
                    if j == jj: continue
                    result[-1].append(x)
            return result
        def determinant(m):
            # recursive Laplace expansion along the first column
            if len(m) == 2:
                return m[0][0]*m[1][1] - m[1][0]*m[0][1]
            result = 0.0
            for i in xrange(len(m)):
                det = determinant(adjoint(m, i, 0))
                if i & 1:
                    result -= m[i][0] * det
                else:
                    result += m[i][0] * det
            return result

        if fast:
            # invert the 3x3 part and transform the translation by it
            m = self.get_matrix_33().get_inverse()
            t = -(self.get_translation() * m)

            n = NifFormat.Matrix44()
            n.m_14 = 0.0
            n.m_24 = 0.0
            n.m_34 = 0.0
            n.m_44 = 1.0
            n.set_matrix_33(m)
            n.set_translation(t)
            return n
        else:
            # general case: cofactor expansion (note nn[j][i] transposes)
            m = self.as_list()
            nn = [[0.0 for i in xrange(4)] for j in xrange(4)]
            det = determinant(m)
            if abs(det) < NifFormat.EPSILON:
                raise ZeroDivisionError('cannot invert matrix:\n%s'%self)
            for i in xrange(4):
                for j in xrange(4):
                    if (i+j) & 1:
                        nn[j][i] = -determinant(adjoint(m, i, j)) / det
                    else:
                        nn[j][i] = determinant(adjoint(m, i, j)) / det
            n = NifFormat.Matrix44()
            n.set_rows(*nn)
            return n
3746 - def __mul__(self, x):
3747 if isinstance(x, (float, int, long)): 3748 m = NifFormat.Matrix44() 3749 m.m_11 = self.m_11 * x 3750 m.m_12 = self.m_12 * x 3751 m.m_13 = self.m_13 * x 3752 m.m_14 = self.m_14 * x 3753 m.m_21 = self.m_21 * x 3754 m.m_22 = self.m_22 * x 3755 m.m_23 = self.m_23 * x 3756 m.m_24 = self.m_24 * x 3757 m.m_31 = self.m_31 * x 3758 m.m_32 = self.m_32 * x 3759 m.m_33 = self.m_33 * x 3760 m.m_34 = self.m_34 * x 3761 m.m_41 = self.m_41 * x 3762 m.m_42 = self.m_42 * x 3763 m.m_43 = self.m_43 * x 3764 m.m_44 = self.m_44 * x 3765 return m 3766 elif isinstance(x, NifFormat.Vector3): 3767 raise TypeError("matrix*vector not supported; please use left multiplication (vector*matrix)") 3768 elif isinstance(x, NifFormat.Vector4): 3769 raise TypeError("matrix*vector not supported; please use left multiplication (vector*matrix)") 3770 elif isinstance(x, NifFormat.Matrix44): 3771 m = NifFormat.Matrix44() 3772 m.m_11 = self.m_11 * x.m_11 + self.m_12 * x.m_21 + self.m_13 * x.m_31 + self.m_14 * x.m_41 3773 m.m_12 = self.m_11 * x.m_12 + self.m_12 * x.m_22 + self.m_13 * x.m_32 + self.m_14 * x.m_42 3774 m.m_13 = self.m_11 * x.m_13 + self.m_12 * x.m_23 + self.m_13 * x.m_33 + self.m_14 * x.m_43 3775 m.m_14 = self.m_11 * x.m_14 + self.m_12 * x.m_24 + self.m_13 * x.m_34 + self.m_14 * x.m_44 3776 m.m_21 = self.m_21 * x.m_11 + self.m_22 * x.m_21 + self.m_23 * x.m_31 + self.m_24 * x.m_41 3777 m.m_22 = self.m_21 * x.m_12 + self.m_22 * x.m_22 + self.m_23 * x.m_32 + self.m_24 * x.m_42 3778 m.m_23 = self.m_21 * x.m_13 + self.m_22 * x.m_23 + self.m_23 * x.m_33 + self.m_24 * x.m_43 3779 m.m_24 = self.m_21 * x.m_14 + self.m_22 * x.m_24 + self.m_23 * x.m_34 + self.m_24 * x.m_44 3780 m.m_31 = self.m_31 * x.m_11 + self.m_32 * x.m_21 + self.m_33 * x.m_31 + self.m_34 * x.m_41 3781 m.m_32 = self.m_31 * x.m_12 + self.m_32 * x.m_22 + self.m_33 * x.m_32 + self.m_34 * x.m_42 3782 m.m_33 = self.m_31 * x.m_13 + self.m_32 * x.m_23 + self.m_33 * x.m_33 + self.m_34 * x.m_43 3783 m.m_34 = self.m_31 * x.m_14 + self.m_32 
* x.m_24 + self.m_33 * x.m_34 + self.m_34 * x.m_44 3784 m.m_41 = self.m_41 * x.m_11 + self.m_42 * x.m_21 + self.m_43 * x.m_31 + self.m_44 * x.m_41 3785 m.m_42 = self.m_41 * x.m_12 + self.m_42 * x.m_22 + self.m_43 * x.m_32 + self.m_44 * x.m_42 3786 m.m_43 = self.m_41 * x.m_13 + self.m_42 * x.m_23 + self.m_43 * x.m_33 + self.m_44 * x.m_43 3787 m.m_44 = self.m_41 * x.m_14 + self.m_42 * x.m_24 + self.m_43 * x.m_34 + self.m_44 * x.m_44 3788 return m 3789 else: 3790 raise TypeError("do not know how to multiply Matrix44 with %s"%x.__class__)
3791
3792 - def __div__(self, x):
3793 if isinstance(x, (float, int, long)): 3794 m = NifFormat.Matrix44() 3795 m.m_11 = self.m_11 / x 3796 m.m_12 = self.m_12 / x 3797 m.m_13 = self.m_13 / x 3798 m.m_14 = self.m_14 / x 3799 m.m_21 = self.m_21 / x 3800 m.m_22 = self.m_22 / x 3801 m.m_23 = self.m_23 / x 3802 m.m_24 = self.m_24 / x 3803 m.m_31 = self.m_31 / x 3804 m.m_32 = self.m_32 / x 3805 m.m_33 = self.m_33 / x 3806 m.m_34 = self.m_34 / x 3807 m.m_41 = self.m_41 / x 3808 m.m_42 = self.m_42 / x 3809 m.m_43 = self.m_43 / x 3810 m.m_44 = self.m_44 / x 3811 return m 3812 else: 3813 raise TypeError("do not know how to divide Matrix44 by %s"%x.__class__)
3814 3815 # py3k 3816 __truediv__ = __div__ 3817
3818 - def __rmul__(self, x):
3819 if isinstance(x, (float, int, long)): 3820 return self * x 3821 else: 3822 raise TypeError("do not know how to multiply %s with Matrix44"%x.__class__)
3823
3824 - def __eq__(self, m):
3825 if isinstance(m, type(None)): 3826 return False 3827 if not isinstance(m, NifFormat.Matrix44): 3828 raise TypeError("do not know how to compare Matrix44 and %s"%m.__class__) 3829 if abs(self.m_11 - m.m_11) > NifFormat.EPSILON: return False 3830 if abs(self.m_12 - m.m_12) > NifFormat.EPSILON: return False 3831 if abs(self.m_13 - m.m_13) > NifFormat.EPSILON: return False 3832 if abs(self.m_14 - m.m_14) > NifFormat.EPSILON: return False 3833 if abs(self.m_21 - m.m_21) > NifFormat.EPSILON: return False 3834 if abs(self.m_22 - m.m_22) > NifFormat.EPSILON: return False 3835 if abs(self.m_23 - m.m_23) > NifFormat.EPSILON: return False 3836 if abs(self.m_24 - m.m_24) > NifFormat.EPSILON: return False 3837 if abs(self.m_31 - m.m_31) > NifFormat.EPSILON: return False 3838 if abs(self.m_32 - m.m_32) > NifFormat.EPSILON: return False 3839 if abs(self.m_33 - m.m_33) > NifFormat.EPSILON: return False 3840 if abs(self.m_34 - m.m_34) > NifFormat.EPSILON: return False 3841 if abs(self.m_41 - m.m_41) > NifFormat.EPSILON: return False 3842 if abs(self.m_42 - m.m_42) > NifFormat.EPSILON: return False 3843 if abs(self.m_43 - m.m_43) > NifFormat.EPSILON: return False 3844 if abs(self.m_44 - m.m_44) > NifFormat.EPSILON: return False 3845 return True
3846
3847 - def __ne__(self, m):
3848 return not self.__eq__(m)
3849
3850 - def __add__(self, x):
3851 if isinstance(x, (NifFormat.Matrix44)): 3852 m = NifFormat.Matrix44() 3853 m.m_11 = self.m_11 + x.m_11 3854 m.m_12 = self.m_12 + x.m_12 3855 m.m_13 = self.m_13 + x.m_13 3856 m.m_14 = self.m_14 + x.m_14 3857 m.m_21 = self.m_21 + x.m_21 3858 m.m_22 = self.m_22 + x.m_22 3859 m.m_23 = self.m_23 + x.m_23 3860 m.m_24 = self.m_24 + x.m_24 3861 m.m_31 = self.m_31 + x.m_31 3862 m.m_32 = self.m_32 + x.m_32 3863 m.m_33 = self.m_33 + x.m_33 3864 m.m_34 = self.m_34 + x.m_34 3865 m.m_41 = self.m_41 + x.m_41 3866 m.m_42 = self.m_42 + x.m_42 3867 m.m_43 = self.m_43 + x.m_43 3868 m.m_44 = self.m_44 + x.m_44 3869 return m 3870 elif isinstance(x, (int, long, float)): 3871 m = NifFormat.Matrix44() 3872 m.m_11 = self.m_11 + x 3873 m.m_12 = self.m_12 + x 3874 m.m_13 = self.m_13 + x 3875 m.m_14 = self.m_14 + x 3876 m.m_21 = self.m_21 + x 3877 m.m_22 = self.m_22 + x 3878 m.m_23 = self.m_23 + x 3879 m.m_24 = self.m_24 + x 3880 m.m_31 = self.m_31 + x 3881 m.m_32 = self.m_32 + x 3882 m.m_33 = self.m_33 + x 3883 m.m_34 = self.m_34 + x 3884 m.m_41 = self.m_41 + x 3885 m.m_42 = self.m_42 + x 3886 m.m_43 = self.m_43 + x 3887 m.m_44 = self.m_44 + x 3888 return m 3889 else: 3890 raise TypeError("do not know how to add Matrix44 and %s"%x.__class__)
3891
3892 - def __sub__(self, x):
3893 if isinstance(x, (NifFormat.Matrix44)): 3894 m = NifFormat.Matrix44() 3895 m.m_11 = self.m_11 - x.m_11 3896 m.m_12 = self.m_12 - x.m_12 3897 m.m_13 = self.m_13 - x.m_13 3898 m.m_14 = self.m_14 - x.m_14 3899 m.m_21 = self.m_21 - x.m_21 3900 m.m_22 = self.m_22 - x.m_22 3901 m.m_23 = self.m_23 - x.m_23 3902 m.m_24 = self.m_24 - x.m_24 3903 m.m_31 = self.m_31 - x.m_31 3904 m.m_32 = self.m_32 - x.m_32 3905 m.m_33 = self.m_33 - x.m_33 3906 m.m_34 = self.m_34 - x.m_34 3907 m.m_41 = self.m_41 - x.m_41 3908 m.m_42 = self.m_42 - x.m_42 3909 m.m_43 = self.m_43 - x.m_43 3910 m.m_44 = self.m_44 - x.m_44 3911 return m 3912 elif isinstance(x, (int, long, float)): 3913 m = NifFormat.Matrix44() 3914 m.m_11 = self.m_11 - x 3915 m.m_12 = self.m_12 - x 3916 m.m_13 = self.m_13 - x 3917 m.m_14 = self.m_14 - x 3918 m.m_21 = self.m_21 - x 3919 m.m_22 = self.m_22 - x 3920 m.m_23 = self.m_23 - x 3921 m.m_24 = self.m_24 - x 3922 m.m_31 = self.m_31 - x 3923 m.m_32 = self.m_32 - x 3924 m.m_33 = self.m_33 - x 3925 m.m_34 = self.m_34 - x 3926 m.m_41 = self.m_41 - x 3927 m.m_42 = self.m_42 - x 3928 m.m_43 = self.m_43 - x 3929 m.m_44 = self.m_44 - x 3930 return m 3931 else: 3932 raise TypeError("do not know how to substract Matrix44 and %s" 3933 % x.__class__)
3934
3935 - def sup_norm(self):
3936 """Calculate supremum norm of matrix (maximum absolute value of all 3937 entries).""" 3938 return max(max(abs(elem) for elem in row) 3939 for row in self.as_list())
3940
class NiAVObject:
    """Audio-visual base object: manages the property list and the local
    scale/rotation/translation transform.

    >>> from pyffi.formats.nif import NifFormat
    >>> node = NifFormat.NiNode()
    >>> prop1 = NifFormat.NiProperty()
    >>> prop1.name = "hello"
    >>> prop2 = NifFormat.NiProperty()
    >>> prop2.name = "world"
    >>> node.get_properties()
    []
    >>> node.set_properties([prop1, prop2])
    >>> [prop.name for prop in node.get_properties()]
    ['hello', 'world']
    >>> [prop.name for prop in node.properties]
    ['hello', 'world']
    >>> node.set_properties([])
    >>> node.get_properties()
    []
    >>> # now set them the other way around
    >>> node.set_properties([prop2, prop1])
    >>> [prop.name for prop in node.get_properties()]
    ['world', 'hello']
    >>> [prop.name for prop in node.properties]
    ['world', 'hello']
    >>> node.remove_property(prop2)
    >>> [prop.name for prop in node.properties]
    ['hello']
    >>> node.add_property(prop2)
    >>> [prop.name for prop in node.properties]
    ['hello', 'world']
    """

    def add_property(self, prop):
        """Append the given property to the end of the property list.

        :param prop: The property block to add.
        :type prop: L{NifFormat.NiProperty}
        """
        index = self.num_properties
        self.num_properties = index + 1
        self.properties.update_size()
        self.properties[index] = prop

    def remove_property(self, prop):
        """Remove the given property from the property list.

        :param prop: The property block to remove.
        :type prop: L{NifFormat.NiProperty}
        """
        # rebuild the list with everything except the exact object removed
        remaining = [other for other in self.get_properties()
                     if other is not prop]
        self.set_properties(remaining)

    def get_properties(self):
        """Return the properties of the block as a regular list.

        :return: The list of properties.
        :rtype: ``list`` of L{NifFormat.NiProperty}
        """
        return list(self.properties)

    def set_properties(self, proplist):
        """Replace the property list with the given list (destroys the
        existing list).

        :param proplist: The list of property blocks to set.
        :type proplist: ``list`` of L{NifFormat.NiProperty}
        """
        self.num_properties = len(proplist)
        self.properties.update_size()
        for index, prop in enumerate(proplist):
            self.properties[index] = prop

    def get_transform(self, relative_to=None):
        """Return scale, rotation, and translation combined into a single
        4x4 matrix, relative to the C{relative_to} block (which should be
        another NiAVObject connecting to this block). If C{relative_to} is
        ``None``, then the transform stored in C{self} is returned, or
        equivalently, the target is assumed to be the parent.

        :param relative_to: The block relative to which the transform must
            be calculated. If ``None``, the local transform is returned.
        """
        transform = NifFormat.Matrix44()
        transform.set_scale_rotation_translation(
            self.scale, self.rotation, self.translation)
        if not relative_to:
            return transform
        # find the chain of NiAVObject blocks from relative_to down to self
        chain = relative_to.find_chain(self, block_type = NifFormat.NiAVObject)
        if not chain:
            raise ValueError(
                'cannot find a chain of NiAVObject blocks '
                'between %s and %s.' % (self.name, relative_to.name))
        # multiply with all transforms along the chain, excluding relative_to
        for block in reversed(chain[1:-1]):
            transform *= block.get_transform()
        return transform

    def set_transform(self, m):
        """Set rotation, translation, and scale, from a 4x4 matrix.

        :param m: The matrix to which the transform should be set."""
        scale, rotation, translation = m.get_scale_rotation_translation()

        self.scale = scale

        # copy the 3x3 rotation part entry by entry
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                name = "m_%i%i" % (row, col)
                setattr(self.rotation, name, getattr(rotation, name))

        self.translation.x = translation.x
        self.translation.y = translation.y
        self.translation.z = translation.z

    def apply_scale(self, scale):
        """Apply scale factor on data.

        :param scale: The scale factor."""
        # scale the translation, and the bounding box translation and radius
        for vector in (self.translation,
                       self.bounding_box.translation,
                       self.bounding_box.radius):
            vector.x *= scale
            vector.y *= scale
            vector.z *= scale
class NiBSplineCompTransformInterpolator:
    """Interpolator whose keys are stored as compressed (short) control
    points; decompression uses the per-channel bias and multiplier."""

    def get_translations(self):
        """Return an iterator over all translation keys."""
        return self._getCompKeys(
            self.translation_offset, 3,
            self.translation_bias, self.translation_multiplier)

    def get_rotations(self):
        """Return an iterator over all rotation keys."""
        return self._getCompKeys(
            self.rotation_offset, 4,
            self.rotation_bias, self.rotation_multiplier)

    def get_scales(self):
        """Return an iterator over all scale keys."""
        # scale keys come back as 1-tuples; unwrap them
        for (value,) in self._getCompKeys(self.scale_offset, 1,
                                          self.scale_bias,
                                          self.scale_multiplier):
            yield value

    def apply_scale(self, scale):
        """Apply scale factor on data."""
        translation = self.translation
        translation.x *= scale
        translation.y *= scale
        translation.z *= scale
        # bias and multiplier scale along with the decompressed values
        self.translation_bias *= scale
        self.translation_multiplier *= scale
class NiBSplineData:
    """
    >>> # a doctest
    >>> from pyffi.formats.nif import NifFormat
    >>> block = NifFormat.NiBSplineData()
    >>> block.num_short_control_points = 50
    >>> block.short_control_points.update_size()
    >>> for i in range(block.num_short_control_points):
    ...     block.short_control_points[i] = 20 - i
    >>> list(block.get_short_data(12, 4, 3))
    [(8, 7, 6), (5, 4, 3), (2, 1, 0), (-1, -2, -3)]
    >>> offset = block.append_short_data([(1,2),(4,3),(13,14),(8,2),(33,33)])
    >>> offset
    50
    >>> list(block.get_short_data(offset, 5, 2))
    [(1, 2), (4, 3), (13, 14), (8, 2), (33, 33)]
    >>> list(block.get_comp_data(offset, 5, 2, 10.0, 32767.0))
    [(11.0, 12.0), (14.0, 13.0), (23.0, 24.0), (18.0, 12.0), (43.0, 43.0)]
    >>> block.append_float_data([(1.0,2.0),(3.0,4.0),(0.5,0.25)])
    0
    >>> list(block.get_float_data(0, 3, 2))
    [(1.0, 2.0), (3.0, 4.0), (0.5, 0.25)]
    >>> block.append_comp_data([(1,2),(4,3)])
    (60, 2.5, 1.5)
    >>> list(block.get_short_data(60, 2, 2))
    [(-32767, -10922), (32767, 10922)]
    >>> list(block.get_comp_data(60, 2, 2, 2.5, 1.5)) # doctest: +ELLIPSIS
    [(1.0, 2.00...), (4.0, 2.99...)]
    """
    def _getData(self, offset, num_elements, element_size, controlpoints):
        """Helper function for get_float_data and get_short_data. For internal
        use only.

        :param offset: Index into C{controlpoints} where the data starts.
        :param num_elements: Number of elements to yield.
        :param element_size: Number of control points per element.
        :param controlpoints: Must be C{self.float_control_points} or
            C{self.short_control_points}.
        """
        # check arguments; the error message used to say "appending", which
        # was a copy-paste mistake (this helper fetches data)
        if not (controlpoints is self.float_control_points
                or controlpoints is self.short_control_points):
            raise ValueError("internal error while fetching data")
        # parse the data (range instead of xrange: works on Python 2 and 3)
        for element in range(num_elements):
            yield tuple(
                controlpoints[offset + element * element_size + index]
                for index in range(element_size))

    def _appendData(self, data, controlpoints):
        """Helper function for append_float_data and append_short_data. For
        internal use only.

        :param data: List of equally-sized tuples to append.
        :param controlpoints: Must be C{self.float_control_points} or
            C{self.short_control_points}.
        :return: The offset at which the data was appended, or ``None`` if
            C{data} is empty.
        """
        # get number of elements
        num_elements = len(data)
        # empty list, do nothing
        if num_elements == 0:
            return
        # get element size
        element_size = len(data[0])
        # store offset at which we append the data
        if controlpoints is self.float_control_points:
            offset = self.num_float_control_points
            self.num_float_control_points += num_elements * element_size
        elif controlpoints is self.short_control_points:
            offset = self.num_short_control_points
            self.num_short_control_points += num_elements * element_size
        else:
            raise ValueError("internal error while appending data")
        # update size
        controlpoints.update_size()
        # store the data
        for element, datum in enumerate(data):
            for index, value in enumerate(datum):
                controlpoints[offset + element * element_size + index] = value
        # return the offset
        return offset

    def get_short_data(self, offset, num_elements, element_size):
        """Get an iterator to the data.

        :param offset: The offset in the data where to start.
        :param num_elements: Number of elements to get.
        :param element_size: Size of a single element.
        :return: A list of C{num_elements} tuples of size C{element_size}.
        """
        return self._getData(
            offset, num_elements, element_size, self.short_control_points)

    def get_comp_data(self, offset, num_elements, element_size, bias, multiplier):
        """Get an iterator to the data, converted to float with extra bias and
        multiplication factor. If C{x} is the short value, then the returned
        value is C{bias + x * multiplier / 32767.0}.

        :param offset: The offset in the data where to start.
        :param num_elements: Number of elements to get.
        :param element_size: Size of a single element.
        :param bias: Value bias.
        :param multiplier: Value multiplier.
        :return: A list of C{num_elements} tuples of size C{element_size}.
        """
        for key in self.get_short_data(offset, num_elements, element_size):
            yield tuple(bias + x * multiplier / 32767.0 for x in key)

    def append_short_data(self, data):
        """Append data.

        :param data: A list of elements, where each element is a tuple of
            integers. (Note: cannot be an iterator; maybe this restriction
            will be removed in a future version.)
        :return: The offset at which the data was appended."""
        return self._appendData(data, self.short_control_points)

    def append_comp_data(self, data):
        """Append data as compressed list.

        :param data: A list of elements, where each element is a tuple of
            integers. (Note: cannot be an iterator; maybe this restriction
            will be removed in a future version.)
        :return: The offset, bias, and multiplier."""
        # get extremes
        maxvalue = max(max(datum) for datum in data)
        minvalue = min(min(datum) for datum in data)
        # get bias and multiplier
        bias = 0.5 * (maxvalue + minvalue)
        if maxvalue > minvalue:
            multiplier = 0.5 * (maxvalue - minvalue)
        else:
            # no need to compress in this case
            multiplier = 1.0

        # compress points into shorts
        shortdata = []
        for datum in data:
            shortdata.append(tuple(int(32767 * (x - bias) / multiplier)
                                   for x in datum))
        return (self._appendData(shortdata, self.short_control_points),
                bias, multiplier)

    def get_float_data(self, offset, num_elements, element_size):
        """Get an iterator to the data.

        :param offset: The offset in the data where to start.
        :param num_elements: Number of elements to get.
        :param element_size: Size of a single element.
        :return: A list of C{num_elements} tuples of size C{element_size}.
        """
        return self._getData(
            offset, num_elements, element_size, self.float_control_points)

    def append_float_data(self, data):
        """Append data.

        :param data: A list of elements, where each element is a tuple of
            floats. (Note: cannot be an iterator; maybe this restriction
            will be removed in a future version.)
        :return: The offset at which the data was appended."""
        return self._appendData(data, self.float_control_points)
class NiBSplineInterpolator:
    """Common base behavior for b-spline interpolators: key times and
    access to the raw (float) and compressed key data."""

    def get_times(self):
        """Return an iterator over all key times.

        @todo: When code for calculating the bsplines is ready, this function
        will return exactly self.basis_data.num_control_points - 1 time points, and
        not self.basis_data.num_control_points as it is now.
        """
        # no basis data means no keys at all
        if not self.basis_data:
            return
        # times are spread evenly over [start_time, stop_time]
        span = self.stop_time - self.start_time
        denom = self.basis_data.num_control_points - 1
        for i in xrange(self.basis_data.num_control_points):
            yield self.start_time + (i * span / denom)

    def _getFloatKeys(self, offset, element_size):
        """Helper function to get iterator to various keys. Internal use only."""
        # an offset of 65535 flags "this channel has no keys"
        if offset == 65535:
            return
        # both basis data and spline data must be present
        if not self.basis_data or not self.spline_data:
            return
        for key in self.spline_data.get_float_data(
                offset, self.basis_data.num_control_points, element_size):
            yield key

    def _getCompKeys(self, offset, element_size, bias, multiplier):
        """Helper function to get iterator to various keys. Internal use only."""
        # an offset of 65535 flags "this channel has no keys"
        if offset == 65535:
            return
        # both basis data and spline data must be present
        if not self.basis_data or not self.spline_data:
            return
        for key in self.spline_data.get_comp_data(
                offset, self.basis_data.num_control_points,
                element_size, bias, multiplier):
            yield key
4296
class NiBSplineTransformInterpolator:
    """Interpolator whose keys are stored as uncompressed float control
    points."""

    def get_translations(self):
        """Return an iterator over all translation keys."""
        return self._getFloatKeys(self.translation_offset, 3)

    def get_rotations(self):
        """Return an iterator over all rotation keys."""
        return self._getFloatKeys(self.rotation_offset, 4)

    def get_scales(self):
        """Return an iterator over all scale keys."""
        # scale keys come back as 1-tuples; unwrap them
        for (value,) in self._getFloatKeys(self.scale_offset, 1):
            yield value

    def apply_scale(self, scale):
        """Apply scale factor on data."""
        translation = self.translation
        translation.x *= scale
        translation.y *= scale
        translation.z *= scale
        # the stored translation float keys must be scaled as well
        # (an offset of 65535 means there are no translation keys)
        if self.translation_offset != 65535:
            base = self.translation_offset
            count = self.basis_data.num_control_points
            points = self.spline_data.float_control_points
            for element in xrange(count):
                for index in xrange(3):
                    points[base + element * 3 + index] *= scale
4325
class NiControllerSequence:
    def add_controlled_block(self):
        """Create new controlled block, and return it.

        >>> seq = NifFormat.NiControllerSequence()
        >>> seq.num_controlled_blocks
        0
        >>> ctrlblock = seq.add_controlled_block()
        >>> seq.num_controlled_blocks
        1
        >>> isinstance(ctrlblock, NifFormat.ControllerLink)
        True
        """
        # grow the array by one and hand back the freshly created entry
        self.num_controlled_blocks += 1
        self.controlled_blocks.update_size()
        return self.controlled_blocks[-1]
4344
class NiGeometryData:
    """
    >>> from pyffi.formats.nif import NifFormat
    >>> geomdata = NifFormat.NiGeometryData()
    >>> geomdata.num_vertices = 3
    >>> geomdata.has_vertices = True
    >>> geomdata.has_normals = True
    >>> geomdata.has_vertex_colors = True
    >>> geomdata.num_uv_sets = 2
    >>> geomdata.vertices.update_size()
    >>> geomdata.normals.update_size()
    >>> geomdata.vertex_colors.update_size()
    >>> geomdata.uv_sets.update_size()
    >>> geomdata.vertices[0].x = 1
    >>> geomdata.vertices[0].y = 2
    >>> geomdata.vertices[0].z = 3
    >>> geomdata.vertices[1].x = 4
    >>> geomdata.vertices[1].y = 5
    >>> geomdata.vertices[1].z = 6
    >>> geomdata.vertices[2].x = 1.200001
    >>> geomdata.vertices[2].y = 3.400001
    >>> geomdata.vertices[2].z = 5.600001
    >>> geomdata.normals[0].x = 0
    >>> geomdata.normals[0].y = 0
    >>> geomdata.normals[0].z = 1
    >>> geomdata.normals[1].x = 0
    >>> geomdata.normals[1].y = 1
    >>> geomdata.normals[1].z = 0
    >>> geomdata.normals[2].x = 1
    >>> geomdata.normals[2].y = 0
    >>> geomdata.normals[2].z = 0
    >>> geomdata.vertex_colors[1].r = 0.310001
    >>> geomdata.vertex_colors[1].g = 0.320001
    >>> geomdata.vertex_colors[1].b = 0.330001
    >>> geomdata.vertex_colors[1].a = 0.340001
    >>> geomdata.uv_sets[0][0].u = 0.990001
    >>> geomdata.uv_sets[0][0].v = 0.980001
    >>> geomdata.uv_sets[0][2].u = 0.970001
    >>> geomdata.uv_sets[0][2].v = 0.960001
    >>> geomdata.uv_sets[1][0].v = 0.910001
    >>> geomdata.uv_sets[1][0].v = 0.920001
    >>> geomdata.uv_sets[1][2].v = 0.930001
    >>> geomdata.uv_sets[1][2].v = 0.940001
    >>> for h in geomdata.get_vertex_hash_generator():
    ...     print(h)
    (1000, 2000, 3000, 0, 0, 1000, 99000, 98000, 0, 92000, 0, 0, 0, 0)
    (4000, 5000, 6000, 0, 1000, 0, 0, 0, 0, 0, 310, 320, 330, 340)
    (1200, 3400, 5600, 1000, 0, 0, 97000, 96000, 0, 94000, 0, 0, 0, 0)
    """
    def update_center_radius(self):
        """Recalculate center and radius of the data."""
        # in case there are no vertices, set center and radius to zero
        if len(self.vertices) == 0:
            self.center.x = 0.0
            self.center.y = 0.0
            self.center.z = 0.0
            self.radius = 0.0
            return

        # the center is the midpoint of the axis-aligned bounding box
        xs = [v.x for v in self.vertices]
        ys = [v.y for v in self.vertices]
        zs = [v.z for v in self.vertices]
        cx = (min(xs) + max(xs)) * 0.5
        cy = (min(ys) + max(ys)) * 0.5
        cz = (min(zs) + max(zs)) * 0.5
        self.center.x = cx
        self.center.y = cy
        self.center.z = cz

        # the radius is the largest distance from the center to any vertex
        r2 = 0.0
        for v in self.vertices:
            dx = cx - v.x
            dy = cy - v.y
            dz = cz - v.z
            r2 = max(r2, dx * dx + dy * dy + dz * dz)
        self.radius = r2 ** 0.5

    def apply_scale(self, scale):
        """Apply scale factor on data."""
        # a scale within EPSILON of one is a no-op
        if abs(scale - 1.0) < NifFormat.EPSILON:
            return
        for v in self.vertices:
            v.x *= scale
            v.y *= scale
            v.z *= scale
        self.center.x *= scale
        self.center.y *= scale
        self.center.z *= scale
        self.radius *= scale

    def get_vertex_hash_generator(
        self,
        vertexprecision=3, normalprecision=3,
        uvprecision=5, vcolprecision=3):
        """Generator which produces a tuple of integers for each
        (vertex, normal, uv, vcol), to ease detection of duplicate
        vertices. The precision parameters denote number of
        significant digits behind the comma.

        Default for uvprecision should really be high because for
        very large models the uv coordinates can be very close
        together.

        For vertexprecision, 3 seems usually enough (maybe we'll
        have to increase this at some point).

        :param vertexprecision: Precision to be used for vertices.
        :param normalprecision: Precision to be used for normals.
        :param uvprecision: Precision to be used for uvs.
        :param vcolprecision: Precision to be used for vertex colors.
        :return: A generator yielding a hash value for each vertex.
        """
        # only include the channels that are actually present
        verts = self.vertices if self.has_vertices else None
        norms = self.normals if self.has_normals else None
        uvsets = self.uv_sets if len(self.uv_sets) else None
        vcols = self.vertex_colors if self.has_vertex_colors else None
        vertexfactor = 10 ** vertexprecision
        normalfactor = 10 ** normalprecision
        uvfactor = 10 ** uvprecision
        vcolfactor = 10 ** vcolprecision
        for i in xrange(self.num_vertices):
            h = []
            if verts:
                v = verts[i]
                h.extend(float_to_int(value * vertexfactor)
                         for value in (v.x, v.y, v.z))
            if norms:
                n = norms[i]
                h.extend(float_to_int(value * normalfactor)
                         for value in (n.x, n.y, n.z))
            if uvsets:
                for uvset in uvsets:
                    # uvs sometimes have NaN, for example:
                    # oblivion/meshes/architecture/anvil/anvildooruc01.nif
                    h.extend(float_to_int(value * uvfactor)
                             for value in (uvset[i].u, uvset[i].v))
            if vcols:
                c = vcols[i]
                h.extend(float_to_int(value * vcolfactor)
                         for value in (c.r, c.g, c.b, c.a))
            yield tuple(h)
4495
class NiGeometry:
    """Base class for geometry blocks; provides skinning helpers
    (validation, bone management, bind-position and deformation
    calculations) on top of the generated attributes.

    >>> from pyffi.formats.nif import NifFormat
    >>> id44 = NifFormat.Matrix44()
    >>> id44.set_identity()
    >>> skelroot = NifFormat.NiNode()
    >>> skelroot.name = 'skelroot'
    >>> skelroot.set_transform(id44)
    >>> bone1 = NifFormat.NiNode()
    >>> bone1.name = 'bone1'
    >>> bone1.set_transform(id44)
    >>> bone2 = NifFormat.NiNode()
    >>> bone2.name = 'bone2'
    >>> bone2.set_transform(id44)
    >>> bone21 = NifFormat.NiNode()
    >>> bone21.name = 'bone21'
    >>> bone21.set_transform(id44)
    >>> bone22 = NifFormat.NiNode()
    >>> bone22.name = 'bone22'
    >>> bone22.set_transform(id44)
    >>> bone211 = NifFormat.NiNode()
    >>> bone211.name = 'bone211'
    >>> bone211.set_transform(id44)
    >>> skelroot.add_child(bone1)
    >>> bone1.add_child(bone2)
    >>> bone2.add_child(bone21)
    >>> bone2.add_child(bone22)
    >>> bone21.add_child(bone211)
    >>> geom = NifFormat.NiTriShape()
    >>> geom.name = 'geom'
    >>> geom.set_transform(id44)
    >>> geomdata = NifFormat.NiTriShapeData()
    >>> skininst = NifFormat.NiSkinInstance()
    >>> skindata = NifFormat.NiSkinData()
    >>> skelroot.add_child(geom)
    >>> geom.data = geomdata
    >>> geom.skin_instance = skininst
    >>> skininst.skeleton_root = skelroot
    >>> skininst.data = skindata
    >>> skininst.num_bones = 4
    >>> skininst.bones.update_size()
    >>> skininst.bones[0] = bone1
    >>> skininst.bones[1] = bone2
    >>> skininst.bones[2] = bone22
    >>> skininst.bones[3] = bone211
    >>> skindata.num_bones = 4
    >>> skindata.bone_list.update_size()
    >>> [child.name for child in skelroot.children]
    ['bone1', 'geom']
    >>> skindata.set_transform(id44)
    >>> for bonedata in skindata.bone_list:
    ...     bonedata.set_transform(id44)
    >>> affectedbones = geom.flatten_skin()
    >>> [bone.name for bone in affectedbones]
    ['bone1', 'bone2', 'bone22', 'bone211']
    >>> [child.name for child in skelroot.children]
    ['geom', 'bone1', 'bone21', 'bone2', 'bone22', 'bone211']
    """
4554 - def is_skin(self):
4555 """Returns True if geometry is skinned.""" 4556 return self.skin_instance != None
4557
4558 - def _validate_skin(self):
4559 """Check that skinning blocks are valid. Will raise NifError exception 4560 if not.""" 4561 if self.skin_instance == None: return 4562 if self.skin_instance.data == None: 4563 raise NifFormat.NifError('NiGeometry has NiSkinInstance without NiSkinData') 4564 if self.skin_instance.skeleton_root == None: 4565 raise NifFormat.NifError('NiGeometry has NiSkinInstance without skeleton root') 4566 if self.skin_instance.num_bones != self.skin_instance.data.num_bones: 4567 raise NifFormat.NifError('NiSkinInstance and NiSkinData have different number of bones')
4568
4569 - def add_bone(self, bone, vert_weights):
4570 """Add bone with given vertex weights. 4571 After adding all bones, the geometry skinning information should be set 4572 from the current position of the bones using the L{update_bind_position} function. 4573 4574 :param bone: The bone NiNode block. 4575 :param vert_weights: A dictionary mapping each influenced vertex index to a vertex weight.""" 4576 self._validate_skin() 4577 skininst = self.skin_instance 4578 skindata = skininst.data 4579 skelroot = skininst.skeleton_root 4580 4581 bone_index = skininst.num_bones 4582 skininst.num_bones = bone_index+1 4583 skininst.bones.update_size() 4584 skininst.bones[bone_index] = bone 4585 skindata.num_bones = bone_index+1 4586 skindata.bone_list.update_size() 4587 skinbonedata = skindata.bone_list[bone_index] 4588 # set vertex weights 4589 skinbonedata.num_vertices = len(vert_weights) 4590 skinbonedata.vertex_weights.update_size() 4591 for i, (vert_index, vert_weight) in enumerate(vert_weights.iteritems()): 4592 skinbonedata.vertex_weights[i].index = vert_index 4593 skinbonedata.vertex_weights[i].weight = vert_weight
4594 4595 4596
4597 - def get_vertex_weights(self):
4598 """Get vertex weights in a convenient format: list bone and weight per 4599 vertex.""" 4600 # shortcuts relevant blocks 4601 if not self.skin_instance: 4602 raise NifFormat.NifError('Cannot get vertex weights of geometry without skin.') 4603 self._validate_skin() 4604 geomdata = self.data 4605 skininst = self.skin_instance 4606 skindata = skininst.data 4607 # XXX todo: should we use list of dictionaries for this 4608 # where each dict maps bone number to the weight? 4609 weights = [[] for i in xrange(geomdata.num_vertices)] 4610 for bonenum, bonedata in enumerate(skindata.bone_list): 4611 for skinweight in bonedata.vertex_weights: 4612 # skip zero weights 4613 if skinweight.weight != 0: 4614 # boneweightlist is the list of (bonenum, weight) pairs that 4615 # we must update now 4616 boneweightlist = weights[skinweight.index] 4617 # is bonenum already in there? 4618 for i, (otherbonenum, otherweight) in enumerate(boneweightlist): 4619 if otherbonenum == bonenum: 4620 # yes! add the weight to the bone 4621 boneweightlist[i][1] += skinweight.weight 4622 break 4623 else: 4624 # nope... so add new [bone, weight] entry 4625 boneweightlist.append([bonenum, skinweight.weight]) 4626 return weights
4627 4628
    def flatten_skin(self):
        """Reposition all bone blocks and geometry block in the tree to be direct
        children of the skeleton root.

        All transforms are recomputed so every reparented block keeps its
        world position (its transform relative to the skeleton root).

        :return: List of all bones used by the skin (the skeleton root
            itself, if used as a bone, is excluded).
        """

        if not self.is_skin(): return [] # nothing to do

        result = [] # list of repositioned bones
        self._validate_skin() # validate the skin
        skininst = self.skin_instance
        skindata = skininst.data
        skelroot = skininst.skeleton_root

        # reparent geometry: keep its world transform, then move it to the
        # front of the skeleton root's child list
        self.set_transform(self.get_transform(skelroot))
        geometry_parent = skelroot.find_chain(self, block_type = NifFormat.NiAVObject)[-2]
        geometry_parent.remove_child(self) # detach geometry from tree
        skelroot.add_child(self, front = True) # and attach it to the skeleton root

        # reparent all the bone blocks
        for bone_block in skininst.bones:
            # skeleton root, if it is used as bone, does not need to be processed
            if bone_block == skelroot: continue
            # get bone parent
            bone_parent = skelroot.find_chain(bone_block, block_type = NifFormat.NiAVObject)[-2]
            # set new child transforms (relative to the bone's parent, since
            # the children are about to be moved up one level)
            for child in bone_block.children:
                child.set_transform(child.get_transform(bone_parent))
            # reparent children
            for child in bone_block.children:
                bone_parent.add_child(child)
            bone_block.num_children = 0
            bone_block.children.update_size() # = remove_child on each child
            # set new bone transform
            bone_block.set_transform(bone_block.get_transform(skelroot))
            # reparent bone block
            bone_parent.remove_child(bone_block)
            skelroot.add_child(bone_block)
            result.append(bone_block)

        return result
4671 4672 4673 4674 # The nif skinning algorithm works as follows (as of nifskope): 4675 # v' # vertex after skinning in geometry space 4676 # = sum over {b in skininst.bones} # sum over all bones b that influence the mesh 4677 # weight[v][b] # how much bone b influences vertex v 4678 # * v # vertex before skinning in geometry space (as it is stored in the shape data) 4679 # * skindata.bone_list[b].transform # transform vertex to bone b space in the rest pose 4680 # * b.get_transform(skelroot) # apply animation, by multiplying with all bone matrices in the chain down to the skeleton root; the vertex is now in skeleton root space 4681 # * skindata.transform # transforms vertex from skeleton root space back to geometry space
4682 - def get_skin_deformation(self):
4683 """Returns a list of vertices and normals in their final position after 4684 skinning, in geometry space.""" 4685 4686 if not self.data: return [], [] 4687 4688 if not self.is_skin(): return self.data.vertices, self.data.normals 4689 4690 self._validate_skin() 4691 skininst = self.skin_instance 4692 skindata = skininst.data 4693 skelroot = skininst.skeleton_root 4694 4695 vertices = [ NifFormat.Vector3() for i in xrange(self.data.num_vertices) ] 4696 normals = [ NifFormat.Vector3() for i in xrange(self.data.num_vertices) ] 4697 sumweights = [ 0.0 for i in xrange(self.data.num_vertices) ] 4698 skin_offset = skindata.get_transform() 4699 for i, bone_block in enumerate(skininst.bones): 4700 bonedata = skindata.bone_list[i] 4701 bone_offset = bonedata.get_transform() 4702 bone_matrix = bone_block.get_transform(skelroot) 4703 transform = bone_offset * bone_matrix * skin_offset 4704 scale, rotation, translation = transform.get_scale_rotation_translation() 4705 for skinweight in bonedata.vertex_weights: 4706 index = skinweight.index 4707 weight = skinweight.weight 4708 vertices[index] += weight * (self.data.vertices[index] * transform) 4709 if self.data.has_normals: 4710 normals[index] += weight * (self.data.normals[index] * rotation) 4711 sumweights[index] += weight 4712 4713 for i, s in enumerate(sumweights): 4714 if abs(s - 1.0) > 0.01: 4715 logging.getLogger("pyffi.nif.nigeometry").warn( 4716 "vertex %i has weights not summing to one" % i) 4717 4718 return vertices, normals
# ported and extended from niflib::NiNode::GoToSkeletonBindPosition() (r2518)
def send_bones_to_bind_position(self):
    """Send all bones to their bind position.

    @deprecated: Use L{NifFormat.NiNode.send_bones_to_bind_position} instead of
    this function.
    """

    warnings.warn("use NifFormat.NiNode.send_bones_to_bind_position",
                  DeprecationWarning)

    # nothing to do for unskinned geometry
    if not self.is_skin():
        return

    # validate skin and set up quick links
    self._validate_skin()
    skininst = self.skin_instance
    skindata = skininst.data
    skelroot = skininst.skeleton_root

    # reposition the bones
    for i, parent_bone in enumerate(skininst.bones):
        parent_offset = skindata.bone_list[i].get_transform()
        # if parent_bone is a child of the skeleton root, then fix its
        # transform
        if parent_bone in skelroot.children:
            parent_bone.set_transform(parent_offset.get_inverse() * self.get_transform(skelroot))
        # fix the transform of all its children
        for j, child_bone in enumerate(skininst.bones):
            if child_bone not in parent_bone.children: continue
            child_offset = skindata.bone_list[j].get_transform()
            child_matrix = child_offset.get_inverse() * parent_offset
            child_bone.set_transform(child_matrix)
# ported from niflib::NiSkinData::ResetOffsets (r2561)
def update_bind_position(self):
    """Make the current pose of the bones the bind position for this geometry.

    Sets the NiSkinData overall transform to the inverse of the geometry
    transform relative to the skeleton root, and sets the offset of each
    bone to the geometry transform relative to the skeleton root times the
    inverse of the bone transform relative to the skeleton root."""
    if not self.is_skin():
        return

    # validate skin and set up quick links
    self._validate_skin()
    skin_instance = self.skin_instance
    skin_data = skin_instance.data
    skeleton_root = skin_instance.skeleton_root

    # overall offset is the inverse of the geometry transform
    geom_transform = self.get_transform(skeleton_root)
    skin_data.set_transform(geom_transform.get_inverse())

    # per-bone offsets
    for index, bone_node in enumerate(skin_instance.bones):
        skin_data.bone_list[index].set_transform(
            geom_transform * bone_node.get_transform(skeleton_root).get_inverse())
4781
def get_skin_partition(self):
    """Return the skin partition block, or None if there is none.

    The partition is looked up on the skin instance first; if absent there,
    it is looked up on the skin data."""
    inst = self.skin_instance
    if not inst:
        return None
    part = inst.skin_partition
    if not part:
        data = inst.data
        if data:
            part = data.skin_partition
    return part
4795
def set_skin_partition(self, skinpart):
    """Attach C{skinpart} as the skin partition of this geometry.

    The partition is stored both on the skin instance and on the skin data.

    :raise ValueError: If the geometry has no skin instance or no skin data.
    """
    inst = self.skin_instance
    if not inst:
        raise ValueError("Geometry has no skin instance.")

    data = inst.data
    if not data:
        raise ValueError("Geometry has no skin data.")

    inst.skin_partition = skinpart
    data.skin_partition = skinpart
4808
class NiKeyframeData:
    def apply_scale(self, scale):
        """Scale the translation keys by C{scale} (in place).

        Only the key values are scaled; forward/backward tangents and TBC
        parameters are left untouched, as in the original implementation
        (it is unclear what should happen to TBC under scaling)."""
        for translation_key in self.translations.keys:
            value = translation_key.value
            value.x *= scale
            value.y *= scale
            value.z *= scale
class NiMaterialColorController:
    # The target color lives in two places depending on the nif version:
    # bits 4-6 of the controller flags (older versions) and the
    # target_color attribute (newer versions).  The accessors below keep
    # both in sync so they work for all nif versions.

    def get_target_color(self):
        """Get target color (works for all nif versions).

        :return: The target color, combined from the flag bits and the
            target_color attribute (whichever is set)."""
        return ((self.flags >> 4) & 7) | self.target_color

    def set_target_color(self, target_color):
        """Set target color (works for all nif versions).

        :param target_color: The new target color; only the lowest three
            bits are mirrored into the flags."""
        # clear bits 4-6 first: or-ing alone cannot unset bits left over
        # from a previously stored target color
        self.flags &= ~(7 << 4)
        self.flags |= (target_color & 7) << 4
        self.target_color = target_color
4833
class NiMorphData:
    def apply_scale(self, scale):
        """Scale all morph target vectors by C{scale} (in place)."""
        for morph in self.morphs:
            for vec in morph.vectors:
                vec.x, vec.y, vec.z = vec.x * scale, vec.y * scale, vec.z * scale
class NiNode:
    """A scene graph node; provides helpers for managing the child and
    effect lists.

    >>> from pyffi.formats.nif import NifFormat
    >>> x = NifFormat.NiNode()
    >>> y = NifFormat.NiNode()
    >>> z = NifFormat.NiNode()
    >>> x.num_children =1
    >>> x.children.update_size()
    >>> y in x.children
    False
    >>> x.children[0] = y
    >>> y in x.children
    True
    >>> x.add_child(z, front = True)
    >>> x.add_child(y)
    >>> x.num_children
    2
    >>> x.children[0] is z
    True
    >>> x.remove_child(y)
    >>> y in x.children
    False
    >>> x.num_children
    1
    >>> e = NifFormat.NiSpotLight()
    >>> x.add_effect(e)
    >>> x.num_effects
    1
    >>> e in x.effects
    True

    >>> from pyffi.formats.nif import NifFormat
    >>> node = NifFormat.NiNode()
    >>> child1 = NifFormat.NiNode()
    >>> child1.name = "hello"
    >>> child_2 = NifFormat.NiNode()
    >>> child_2.name = "world"
    >>> node.get_children()
    []
    >>> node.set_children([child1, child_2])
    >>> [child.name for child in node.get_children()]
    ['hello', 'world']
    >>> [child.name for child in node.children]
    ['hello', 'world']
    >>> node.set_children([])
    >>> node.get_children()
    []
    >>> # now set them the other way around
    >>> node.set_children([child_2, child1])
    >>> [child.name for child in node.get_children()]
    ['world', 'hello']
    >>> [child.name for child in node.children]
    ['world', 'hello']
    >>> node.remove_child(child_2)
    >>> [child.name for child in node.children]
    ['hello']
    >>> node.add_child(child_2)
    >>> [child.name for child in node.children]
    ['hello', 'world']

    >>> from pyffi.formats.nif import NifFormat
    >>> node = NifFormat.NiNode()
    >>> effect1 = NifFormat.NiSpotLight()
    >>> effect1.name = "hello"
    >>> effect2 = NifFormat.NiSpotLight()
    >>> effect2.name = "world"
    >>> node.get_effects()
    []
    >>> node.set_effects([effect1, effect2])
    >>> [effect.name for effect in node.get_effects()]
    ['hello', 'world']
    >>> [effect.name for effect in node.effects]
    ['hello', 'world']
    >>> node.set_effects([])
    >>> node.get_effects()
    []
    >>> # now set them the other way around
    >>> node.set_effects([effect2, effect1])
    >>> [effect.name for effect in node.get_effects()]
    ['world', 'hello']
    >>> [effect.name for effect in node.effects]
    ['world', 'hello']
    >>> node.remove_effect(effect2)
    >>> [effect.name for effect in node.effects]
    ['hello']
    >>> node.add_effect(effect2)
    >>> [effect.name for effect in node.effects]
    ['hello', 'world']
    """
def add_child(self, child, front=False):
    """Add block to child list.

    :param child: The child to add.
    :type child: L{NifFormat.NiAVObject}
    :keyword front: Whether to add to the front or to the end of the
        list (default is at end).
    :type front: ``bool``
    """
    # nothing to do when the block is already a child
    if child in self.children:
        return
    # grow the child array by one slot
    old_count = self.num_children
    self.num_children = old_count + 1
    self.children.update_size()
    if front:
        # shift everything one position to the right, then insert at 0
        for slot in xrange(old_count, 0, -1):
            self.children[slot] = self.children[slot - 1]
        self.children[0] = child
    else:
        self.children[old_count] = child
4955
def remove_child(self, child):
    """Remove a block from the child list.

    :param child: The child to remove.
    :type child: L{NifFormat.NiAVObject}
    """
    remaining = [block for block in self.get_children() if block is not child]
    self.set_children(remaining)
4964
def get_children(self):
    """Return a list of the children of the block.

    :return: The list of children.
    :rtype: ``list`` of L{NifFormat.NiAVObject}
    """
    return list(self.children)
4972
def set_children(self, childlist):
    """Set the list of children from the given list (destroys existing list).

    :param childlist: The list of child blocks to set.
    :type childlist: ``list`` of L{NifFormat.NiAVObject}
    """
    # resize the array, then copy the blocks over slot by slot
    self.num_children = len(childlist)
    self.children.update_size()
    for slot, block in enumerate(childlist):
        self.children[slot] = block
4983
def add_effect(self, effect):
    """Append an effect to the list of effects.

    :param effect: The effect to add.
    :type effect: L{NifFormat.NiDynamicEffect}
    """
    slot = self.num_effects
    self.num_effects = slot + 1
    self.effects.update_size()
    self.effects[slot] = effect
4994
def remove_effect(self, effect):
    """Remove a block from the effect list.

    :param effect: The effect to remove.
    :type effect: L{NifFormat.NiDynamicEffect}
    """
    kept = [candidate for candidate in self.get_effects()
            if candidate is not effect]
    self.set_effects(kept)
5003
def get_effects(self):
    """Return a list of the effects of the block.

    :return: The list of effects.
    :rtype: ``list`` of L{NifFormat.NiDynamicEffect}
    """
    return list(self.effects)
5011
def set_effects(self, effectlist):
    """Set the list of effects from the given list (destroys existing list).

    :param effectlist: The list of effect blocks to set.
    :type effectlist: ``list`` of L{NifFormat.NiDynamicEffect}
    """
    # resize the array, then copy the blocks over slot by slot
    self.num_effects = len(effectlist)
    self.effects.update_size()
    for slot, block in enumerate(effectlist):
        self.effects[slot] = block
5022
def merge_external_skeleton_root(self, skelroot):
    """Attach skinned geometry to self (which will be the new skeleton root of
    the nif at the given skeleton root). Use this function if you move a
    skinned geometry from one nif into a new nif file. The bone links will be
    updated to point to the tree at self, instead of to the external tree.

    :param skelroot: The external skeleton root; must have the same name
        as self.
    :raise ValueError: If the skeleton root names do not match, if a bone
        name occurs more than once under self, or if an external skin
        instance points at an unexpected skeleton root.
    """
    # sanity check
    if self.name != skelroot.name:
        raise ValueError("skeleton root names do not match")

    # get a dictionary mapping bone names to bone blocks
    bone_dict = {}
    for block in self.tree():
        if isinstance(block, NifFormat.NiNode):
            if block.name:
                if block.name in bone_dict:
                    # duplicate names would make the mapping ambiguous
                    raise ValueError(
                        "multiple NiNodes with name %s" % block.name)
                bone_dict[block.name] = block

    # add all non-bone children of the skeleton root to self
    for child in skelroot.get_children():
        # skip empty children
        if not child:
            continue
        # skip bones
        if child.name in bone_dict:
            continue
        # not a bone, so add it
        self.add_child(child)
        # fix links to skeleton root and bones
        for externalblock in child.tree():
            if isinstance(externalblock, NifFormat.NiSkinInstance):
                if not(externalblock.skeleton_root is skelroot):
                    raise ValueError(
                        "expected skeleton root %s but got %s"
                        % (skelroot.name, externalblock.skeleton_root.name))
                # rebind the skin to the tree at self
                externalblock.skeleton_root = self
                # remap each external bone to the bone with the same name
                # under self
                for i, externalbone in enumerate(externalblock.bones):
                    externalblock.bones[i] = bone_dict[externalbone.name]
5063
def merge_skeleton_roots(self):
    """This function will look for other geometries whose skeleton
    root is a (possibly indirect) child of this node. It will then
    reparent those geometries to this node. For example, it will unify
    the skeleton roots in Morrowind's cliffracer.nif file, or of the
    (official) body skins. This makes it much easier to import
    skeletons in for instance Blender: there will be only one skeleton
    root for each bone, over all geometries.

    The merge fails for those geometries whose global skin data
    transform does not match the inverse geometry transform relative to
    the skeleton root (the maths does not work out in this case!)

    :return: A pair of lists: all blocks that have been reparented (and
        added to the skeleton root children list), and all blocks for
        which the merge failed.
    """
    logger = logging.getLogger("pyffi.nif.ninode")

    result = [] # list of reparented blocks
    failed = [] # list of blocks that could not be reparented

    id44 = NifFormat.Matrix44()
    id44.set_identity()

    # find the root block (direct parent of skeleton root that connects
    # to the geometry) for each of these geometries
    for geom in self.get_global_iterator():
        # make sure we only do each geometry once
        if (geom in result) or (geom in failed):
            continue
        # only geometries
        if not isinstance(geom, NifFormat.NiGeometry):
            continue
        # only skins
        if not geom.is_skin():
            continue
        # only if they have a different skeleton root
        if geom.skin_instance.skeleton_root is self:
            continue
        # check transforms
        if (geom.skin_instance.data.get_transform()
            * geom.get_transform(geom.skin_instance.skeleton_root) != id44):
            # use warning(): Logger.warn is a deprecated alias, and the
            # sibling methods of this class already use warning()
            logger.warning(
                "can't rebase %s: global skin data transform does not match "
                "geometry transform relative to skeleton root" % geom.name)
            failed.append(geom)
            continue # skip this one
        # everything ok!
        # find geometry parent
        geomroot = geom.skin_instance.skeleton_root.find_chain(geom)[-2]
        # reparent
        logger.debug("detaching %s from %s" % (geom.name, geomroot.name))
        geomroot.remove_child(geom)
        logger.debug("attaching %s to %s" % (geom.name, self.name))
        self.add_child(geom)
        # set its new skeleton root
        geom.skin_instance.skeleton_root = self
        # fix transform
        geom.skin_instance.data.set_transform(
            geom.get_transform(self).get_inverse(fast=False))
        # and signal that we reparented this block
        result.append(geom)

    return result, failed
5128
def get_skinned_geometries(self):
    """Yield every skinned geometry that has self as its skeleton root."""
    for candidate in self.get_global_iterator():
        if not isinstance(candidate, NifFormat.NiGeometry):
            continue
        if not candidate.is_skin():
            continue
        if candidate.skin_instance.skeleton_root is self:
            yield candidate
5138
def send_geometries_to_bind_position(self):
    """Call this on the skeleton root of geometries. This function will
    transform the geometries, such that all skin data transforms coincide, or
    at least coincide partially.

    :return: A number quantifying the remaining difference between bind
        positions.
    :rtype: ``float``
    """
    # get logger
    logger = logging.getLogger("pyffi.nif.ninode")
    # maps bone name to bind position transform matrix (relative to
    # skeleton root)
    bone_bind_transform = {}
    # find all skinned geometries with self as skeleton root
    geoms = list(self.get_skinned_geometries())
    # sort geometries by bone level
    # this ensures that "parent" geometries serve as reference for "child"
    # geometries
    sorted_geoms = []
    for bone in self.get_global_iterator():
        if not isinstance(bone, NifFormat.NiNode):
            continue
        for geom in geoms:
            if not geom in sorted_geoms:
                if bone in geom.skin_instance.bones:
                    sorted_geoms.append(geom)
    geoms = sorted_geoms
    # now go over all geometries and synchronize their relative bind poses
    for geom in geoms:
        skininst = geom.skin_instance
        skindata = skininst.data
        # set difference matrix to identity
        diff = NifFormat.Matrix44()
        diff.set_identity()
        # go over all bones in current geometry, see if it has been visited
        # before
        for bonenode, bonedata in izip(skininst.bones, skindata.bone_list):
            # bonenode can be None; see pyffi issue #3114079
            if not bonenode:
                continue
            if bonenode.name in bone_bind_transform:
                # calculate difference
                # (see explanation below)
                diff = (bonedata.get_transform()
                        * bone_bind_transform[bonenode.name]
                        * geom.get_transform(self).get_inverse(fast=False))
                break

        if diff.is_identity():
            logger.debug("%s is already in bind position" % geom.name)
        else:
            logger.info("fixing %s bind position" % geom.name)
            # explanation:
            # we must set the bonedata transform T' such that its bone bind
            # position matrix
            #   T'^-1 * G
            # (where T' = the updated bonedata.get_transform()
            # and G = geom.get_transform(self))
            # coincides with the desired matrix
            #   B = bone_bind_transform[bonenode.name]
            # in other words:
            #   T' = G * B^-1
            # or, with diff = D = T * B * G^-1
            #   T' = D^-1 * T
            # to keep the geometry in sync, the vertices and normals must
            # be multiplied with D, e.g. v' = v * D
            # because the full transform
            #   v * T * ... = v * D * D^-1 * T * ... = v' * T' * ...
            # must be kept invariant
            for bonenode, bonedata in izip(skininst.bones, skindata.bone_list):
                # bonenode can be None; see pyffi issue #3114079
                # NOTE(review): by conditional-expression precedence this
                # parses as ("..." % bonenode.name) if bonenode else "<None>",
                # so a missing bone logs the literal "<None>"
                logger.debug(
                    "transforming bind position of bone %s"
                    % bonenode.name if bonenode else "<None>")
                bonedata.set_transform(diff.get_inverse(fast=False)
                                       * bonedata.get_transform())
            # transform geometry
            logger.debug("transforming vertices and normals")
            for vert in geom.data.vertices:
                newvert = vert * diff
                vert.x = newvert.x
                vert.y = newvert.y
                vert.z = newvert.z
            for norm in geom.data.normals:
                newnorm = norm * diff.get_matrix_33()
                norm.x = newnorm.x
                norm.y = newnorm.y
                norm.z = newnorm.z

        # store updated bind position for future reference
        for bonenode, bonedata in izip(skininst.bones, skindata.bone_list):
            # bonenode can be None; see pyffi issue #3114079
            if not bonenode:
                continue
            bone_bind_transform[bonenode.name] = (
                bonedata.get_transform().get_inverse(fast=False)
                * geom.get_transform(self))

    # validation: check that bones share bind position
    bone_bind_transform = {}
    error = 0.0
    for geom in geoms:
        skininst = geom.skin_instance
        skindata = skininst.data
        # go over all bones in current geometry, see if it has been visited
        # before
        for bonenode, bonedata in izip(skininst.bones, skindata.bone_list):
            if not bonenode:
                # bonenode can be None; see pyffi issue #3114079
                continue
            if bonenode.name in bone_bind_transform:
                # calculate difference
                diff = ((bonedata.get_transform().get_inverse(fast=False)
                         * geom.get_transform(self))
                        - bone_bind_transform[bonenode.name])
                # calculate error (sup norm)
                error = max(error,
                            max(max(abs(elem) for elem in row)
                                for row in diff.as_list()))
            else:
                bone_bind_transform[bonenode.name] = (
                    bonedata.get_transform().get_inverse(fast=False)
                    * geom.get_transform(self))

    logger.debug("Geometry bind position error is %f" % error)
    if error > 1e-3:
        logger.warning("Failed to send some geometries to bind position")
    return error
5268
def send_detached_geometries_to_node_position(self):
    """Some nifs (in particular in Morrowind) have geometries that are skinned
    but that do not share bones. In such cases, send_geometries_to_bind_position
    cannot reposition them. This function will send such geometries to the
    position of their root node.

    Examples of such nifs are the official Morrowind skins (after merging
    skeleton roots).

    Returns list of detached geometries that have been moved.
    """
    logger = logging.getLogger("pyffi.nif.ninode")
    geoms = list(self.get_skinned_geometries())

    # parts the geometries into sets that do not share bone influences
    # * first construct sets of bones, merge intersecting sets
    # * then check which geometries belong to which set
    # (note: bone can be None, see issue #3114079)
    bonesets = [
        list(set(bone for bone in geom.skin_instance.bones if bone))
        for geom in geoms]
    # the merged flag signals that we are still merging bones
    merged = True
    while merged:
        merged = False
        for boneset in bonesets:
            for other_boneset in bonesets:
                # skip if sets are identical
                if other_boneset is boneset:
                    continue
                # if not identical, see if they can be merged
                if set(other_boneset) & set(boneset):
                    # XXX hackish but works
                    # calculate union
                    updated_boneset = list(set(other_boneset) | set(boneset))
                    # and move all bones into one bone set
                    del other_boneset[:]
                    del boneset[:]
                    boneset += updated_boneset
                    merged = True
    # remove empty bone sets
    bonesets = list(boneset for boneset in bonesets if boneset)
    logger.debug("bones per partition are")
    for boneset in bonesets:
        logger.debug(str([bone.name for bone in boneset]))
    parts = [[geom for geom in geoms
              if set(geom.skin_instance.bones) & set(boneset)]
             for boneset in bonesets]
    logger.debug("geometries per partition are")
    for part in parts:
        logger.debug(str([geom.name for geom in part]))
    # if there is only one set, we are done
    if len(bonesets) <= 1:
        logger.debug("no detached geometries")
        return []

    # next, for each part, move all geometries so the lowest bone matches the
    # node transform
    for boneset, part in izip(bonesets, parts):
        logger.debug("moving part %s" % str([geom.name for geom in part]))
        # find "lowest" bone in the bone set
        lowest_dist = None
        lowest_bonenode = None
        for bonenode in boneset:
            dist = len(self.find_chain(bonenode))
            if (lowest_dist is None) or (lowest_dist > dist):
                lowest_dist = dist
                lowest_bonenode = bonenode
        logger.debug("reference bone is %s" % lowest_bonenode.name)
        # find a geometry that has this bone
        for geom in part:
            for bonenode, bonedata in izip(geom.skin_instance.bones,
                                           geom.skin_instance.data.bone_list):
                if bonenode is lowest_bonenode:
                    lowest_geom = geom
                    lowest_bonedata = bonedata
                    break
            else:
                continue
            break
        else:
            raise RuntimeError("no reference geometry with this bone: bug?")
        # calculate matrix
        diff = (lowest_bonedata.get_transform()
                * lowest_bonenode.get_transform(self)
                * lowest_geom.get_transform(self).get_inverse(fast=False))
        if diff.is_identity():
            logger.debug("%s is already in node position"
                         % lowest_bonenode.name)
            continue
        # now go over all geometries and synchronize their position to the
        # reference bone
        for geom in part:
            logger.info("moving %s to node position" % geom.name)
            # XXX we're using this trick a few times now
            # XXX move it to a separate NiGeometry function
            skininst = geom.skin_instance
            skindata = skininst.data
            # explanation:
            # we must set the bonedata transform T' such that its bone bind
            # position matrix
            #   T'^-1 * G
            # (where T' = the updated lowest_bonedata.get_transform()
            # and G = geom.get_transform(self))
            # coincides with the desired matrix
            #   B = lowest_bonenode.get_transform(self)
            # in other words:
            #   T' = G * B^-1
            # or, with diff = D = T * B * G^-1
            #   T' = D^-1 * T
            # to keep the geometry in sync, the vertices and normals must
            # be multiplied with D, e.g. v' = v * D
            # because the full transform
            #   v * T * ... = v * D * D^-1 * T * ... = v' * T' * ...
            # must be kept invariant
            for bonenode, bonedata in izip(skininst.bones, skindata.bone_list):
                logger.debug("transforming bind position of bone %s"
                             % bonenode.name)
                bonedata.set_transform(diff.get_inverse(fast=False)
                                       * bonedata.get_transform())
            # transform geometry
            logger.debug("transforming vertices and normals")
            for vert in geom.data.vertices:
                newvert = vert * diff
                vert.x = newvert.x
                vert.y = newvert.y
                vert.z = newvert.z
            for norm in geom.data.normals:
                newnorm = norm * diff.get_matrix_33()
                norm.x = newnorm.x
                norm.y = newnorm.y
                norm.z = newnorm.z
    # NOTE(review): the docstring promises a list of moved geometries, but
    # this path falls off the end and implicitly returns None (only the
    # "no detached geometries" branch returns a list) -- confirm intent
5401
def send_bones_to_bind_position(self):
    """This function will send all bones of geometries of this skeleton root
    to their bind position. For best results, call
    L{send_geometries_to_bind_position} first.

    :return: A number quantifying the remaining difference between bind
        positions.
    :rtype: ``float``
    """
    # get logger
    logger = logging.getLogger("pyffi.nif.ninode")
    # check all bones and bone datas to see if a bind position exists
    bonelist = []
    error = 0.0
    geoms = list(self.get_skinned_geometries())
    for geom in geoms:
        skininst = geom.skin_instance
        skindata = skininst.data
        for bonenode, bonedata in izip(skininst.bones, skindata.bone_list):
            # bonenode can be None; see pyffi issue #3114079
            if not bonenode:
                continue
            # make sure all bone data of shared bones coincides
            for othergeom, otherbonenode, otherbonedata in bonelist:
                if bonenode is otherbonenode:
                    diff = ((otherbonedata.get_transform().get_inverse(fast=False)
                             *
                             othergeom.get_transform(self))
                            -
                            (bonedata.get_transform().get_inverse(fast=False)
                             *
                             geom.get_transform(self)))
                    if diff.sup_norm() > 1e-3:
                        logger.warning("Geometries %s and %s do not share the same bind position: bone %s will be sent to a position matching only one of these" % (geom.name, othergeom.name, bonenode.name))
                    # break the loop
                    break
            else:
                # the loop did not break, so the bone was not yet added
                # add it now
                logger.debug("Found bind position data for %s" % bonenode.name)
                bonelist.append((geom, bonenode, bonedata))

    # the algorithm simply makes all transforms correct by changing
    # each local bone matrix in such a way that the global matrix
    # relative to the skeleton root matches the skinning information

    # this algorithm is numerically most stable if bones are traversed
    # in hierarchical order, so first sort the bones
    sorted_bonelist = []
    for node in self.tree():
        if not isinstance(node, NifFormat.NiNode):
            continue
        for geom, bonenode, bonedata in bonelist:
            if node is bonenode:
                sorted_bonelist.append((geom, bonenode, bonedata))
    bonelist = sorted_bonelist
    # now reposition the bones
    for geom, bonenode, bonedata in bonelist:
        # explanation:
        # v * CHILD * PARENT * ...
        # = v * CHILD * DIFF^-1 * DIFF * PARENT * ...
        # and now choose DIFF such that DIFF * PARENT * ... = desired transform

        # calculate desired transform relative to skeleton root
        # transform is DIFF * PARENT
        transform = (bonedata.get_transform().get_inverse(fast=False)
                     * geom.get_transform(self))
        # calculate difference
        diff = transform * bonenode.get_transform(self).get_inverse(fast=False)
        if not diff.is_identity():
            logger.info("Sending %s to bind position"
                        % bonenode.name)
            # fix transform of this node
            bonenode.set_transform(diff * bonenode.get_transform())
            # fix transform of all its children
            diff_inv = diff.get_inverse(fast=False)
            for childnode in bonenode.children:
                if childnode:
                    childnode.set_transform(childnode.get_transform() * diff_inv)
        else:
            logger.debug("%s is already in bind position"
                         % bonenode.name)

    # validate
    error = 0.0
    diff_error = 0.0
    for geom in geoms:
        skininst = geom.skin_instance
        skindata = skininst.data
        # calculate geometry transform
        geomtransform = geom.get_transform(self)
        # check skin data fields (also see NiGeometry.update_bind_position)
        for i, bone in enumerate(skininst.bones):
            # bone can be None; see pyffi issue #3114079
            if bone is None:
                continue
            diff = ((skindata.bone_list[i].get_transform().get_inverse(fast=False)
                     * geomtransform)
                    - bone.get_transform(self))
            # calculate error (sup norm)
            diff_error = max(max(abs(elem) for elem in row)
                             for row in diff.as_list())
            if diff_error > 1e-3:
                logger.warning(
                    "Failed to set bind position of bone %s for geometry %s (error is %f)"
                    % (bone.name, geom.name, diff_error))
            error = max(error, diff_error)

    logger.debug("Bone bind position maximal error is %f" % error)
    if error > 1e-3:
        logger.warning("Failed to send some bones to bind position")
    return error
5514
class NiObjectNET:
    def add_extra_data(self, extrablock):
        """Add block to extra data list and extra data chain. It is good
        practice to ensure that the extra data has an empty next_extra_data
        field when adding it, to avoid loops in the hierarchy."""
        # append to the array
        slot = self.num_extra_data_list
        self.num_extra_data_list = slot + 1
        self.extra_data_list.update_size()
        self.extra_data_list[slot] = extrablock
        # append to the linked chain
        if not self.extra_data:
            self.extra_data = extrablock
        else:
            tail = self.extra_data
            while tail.next_extra_data:
                tail = tail.next_extra_data
            tail.next_extra_data = extrablock
5533
def remove_extra_data(self, extrablock):
    """Remove block from extra data list and extra data chain.

    >>> from pyffi.formats.nif import NifFormat
    >>> block = NifFormat.NiNode()
    >>> block.num_extra_data_list = 3
    >>> block.extra_data_list.update_size()
    >>> extrablock = NifFormat.NiStringExtraData()
    >>> block.extra_data_list[1] = extrablock
    >>> block.remove_extra_data(extrablock)
    >>> [extra for extra in block.extra_data_list]
    [None, None]
    """
    # remove from list: rebuild the array without the block
    new_extra_list = []
    for extraother in self.extra_data_list:
        if not extraother is extrablock:
            new_extra_list.append(extraother)
    self.num_extra_data_list = len(new_extra_list)
    self.extra_data_list.update_size()
    for i, extraother in enumerate(new_extra_list):
        self.extra_data_list[i] = extraother
    # remove from chain: unlink at the head first, then splice the block
    # out of every position further down the chain
    if self.extra_data is extrablock:
        self.extra_data = extrablock.next_extra_data
    lastextra = self.extra_data
    while lastextra:
        if lastextra.next_extra_data is extrablock:
            lastextra.next_extra_data = lastextra.next_extra_data.next_extra_data
        lastextra = lastextra.next_extra_data
5564
def get_extra_datas(self):
    """Get a list of all extra data blocks.

    Blocks from the extra data array come first, followed by any block
    that occurs only in the linked chain."""
    collected = list(self.extra_data_list)
    node = self.extra_data
    while node:
        if node not in self.extra_data_list:
            collected.append(node)
        node = node.next_extra_data
    return collected
5574
def set_extra_datas(self, extralist):
    """Set all extra data blocks from given list (erases existing data).

    >>> from pyffi.formats.nif import NifFormat
    >>> node = NifFormat.NiNode()
    >>> extra1 = NifFormat.NiExtraData()
    >>> extra1.name = "hello"
    >>> extra2 = NifFormat.NiExtraData()
    >>> extra2.name = "world"
    >>> node.get_extra_datas()
    []
    >>> node.set_extra_datas([extra1, extra2])
    >>> [extra.name for extra in node.get_extra_datas()]
    ['hello', 'world']
    >>> [extra.name for extra in node.extra_data_list]
    ['hello', 'world']
    >>> node.extra_data is extra1
    True
    >>> extra1.next_extra_data is extra2
    True
    >>> extra2.next_extra_data is None
    True
    >>> node.set_extra_datas([])
    >>> node.get_extra_datas()
    []
    >>> # now set them the other way around
    >>> node.set_extra_datas([extra2, extra1])
    >>> [extra.name for extra in node.get_extra_datas()]
    ['world', 'hello']
    >>> [extra.name for extra in node.extra_data_list]
    ['world', 'hello']
    >>> node.extra_data is extra2
    True
    >>> extra2.next_extra_data is extra1
    True
    >>> extra1.next_extra_data is None
    True

    :param extralist: List of extra data blocks to add.
    :type extralist: ``list`` of L{NifFormat.NiExtraData}
    """
    # rebuild the array
    self.num_extra_data_list = len(extralist)
    self.extra_data_list.update_size()
    for slot, extra in enumerate(extralist):
        self.extra_data_list[slot] = extra
    # rebuild the chain from scratch
    self.extra_data = None
    if extralist:
        self.extra_data = extralist[0]
        tail = self.extra_data
        for extra in extralist[1:]:
            tail.next_extra_data = extra
            tail = extra
        tail.next_extra_data = None
5632
def add_controller(self, ctrlblock):
    """Append C{ctrlblock} to the end of this block's controller chain
    and point the controller's target back at C{self}."""
    if self.controller:
        # walk to the tail of the existing chain and append there
        tail = self.controller
        while tail.next_controller:
            tail = tail.next_controller
        tail.next_controller = ctrlblock
    else:
        # empty chain: the new controller becomes the head
        self.controller = ctrlblock
    # the controller animates this block
    ctrlblock.target = self
5644
def get_controllers(self):
    """Return all controllers attached to this block, in chain order."""
    def _walk(link):
        # follow next_controller links until the chain ends
        while link:
            yield link
            link = link.next_controller
    return list(_walk(self.controller))
5653
def add_integer_extra_data(self, name, value):
    """Create a NiIntegerExtraData block holding C{value} under C{name}
    and attach it to this block's extra data."""
    block = NifFormat.NiIntegerExtraData()
    block.name = name
    block.integer_data = value
    self.add_extra_data(block)
5660
class NiObject:
    def find(self, block_name=None, block_type=None):
        """Recursively search the reference tree for the first block that
        matches the given name and/or type; return it, or ``None`` if no
        block matches (also when no criteria are given at all)."""
        # does this block itself match the search criteria?
        if block_name or block_type:
            if not block_type or isinstance(self, block_type):
                if not block_name:
                    # only a type criterion, and it is satisfied
                    return self
                # name criterion: blocks without a name attribute never match
                try:
                    own_name = self.name
                except AttributeError:
                    pass
                else:
                    if block_name == own_name:
                        return self

        # no match here, so search further down the reference tree
        for child in self.get_refs():
            result = child.find(block_name, block_type)
            if result:
                return result

        return None

    def find_chain(self, block, block_type=None):
        """Finds a chain of blocks going from C{self} to C{block}. If found,
        self is the first element and block is the last element. If no branch
        found, returns an empty list. Does not check whether there is more
        than one branch; if so, the first one found is returned.

        :param block: The block to find a chain to.
        :param block_type: The type that blocks should have in this chain."""
        if block is self:
            return [self]
        for child in self.get_refs():
            # restrict the chain to blocks of the requested type
            if block_type and not isinstance(child, block_type):
                continue
            tail = child.find_chain(block, block_type)
            if tail:
                return [self] + tail
        return []

    def apply_scale(self, scale):
        """Scale data in this block. This implementation does nothing.
        Override this method if it contains geometry data that can be
        scaled.
        """
        pass

    def tree(self, block_type=None, follow_all=True, unique=False):
        """A generator for parsing all blocks in the tree (starting from and
        including C{self}).

        :param block_type: If not ``None``, yield only blocks of the type C{block_type}.
        :param follow_all: If C{block_type} is not ``None``, then if this is ``True`` the function will parse the whole tree. Otherwise, the function will not follow branches that start by a non-C{block_type} block.
        :param unique: Whether the generator can return the same block twice or not."""
        if unique:
            # reduce to the non-unique case, filtering out repeats
            seen = []
            for block in self.tree(block_type=block_type,
                                   follow_all=follow_all, unique=False):
                if block not in seen:
                    seen.append(block)
                    yield block
            return

        # yield self if it satisfies the type filter
        if (not block_type) or isinstance(self, block_type):
            yield self
        elif not follow_all:
            # prune: do not descend through non-matching blocks
            return

        # yield the subtree attached to each child
        for child in self.get_refs():
            for block in child.tree(block_type=block_type,
                                    follow_all=follow_all):
                yield block

    def _validateTree(self):
        """Raises ValueError if there is a cycle in the tree."""
        # Walking an acyclic tree visits every block at most once per
        # branch pass; a repeated visit means some block references one
        # of its ancestors, i.e. there is a cycle.
        visited = []
        for block in self.tree():
            if block in visited:
                raise ValueError('cyclic references detected')
            visited.append(block)

    def is_interchangeable(self, other):
        """Are the two blocks interchangeable?

        @todo: Rely on AnyType, SimpleType, ComplexType, etc. implementation.
        """
        if not isinstance(self, (NifFormat.NiProperty,
                                 NifFormat.NiSourceTexture)):
            # blocks that can hold references: quick identity check only
            return self is other
        # properties and source textures: compare contents via hash
        if self.__class__ is not other.__class__:
            return False
        return self.get_hash() == other.get_hash()
5765
class NiMaterialProperty:
    def is_interchangeable(self, other):
        """Are the two material blocks interchangeable?"""
        if self.__class__ is not other.__class__:
            return False
        # material names in this list carry special meaning to some
        # engines/games; such materials must also match on name
        special = ("envmap2", "envmap", "skin", "hair",
                   "dynalpha", "hidesecret", "lava")
        name_matters = (self.name.lower() in special
                        or other.name.lower() in special)
        if name_matters:
            # do not ignore name
            return self.get_hash() == other.get_hash()
        # otherwise the name is irrelevant: compare everything but the
        # name (the first hash component)
        return self.get_hash()[1:] == other.get_hash()[1:]
5780
class ATextureRenderData:
    def save_as_dds(self, stream):
        """Save image as DDS file.

        :param stream: A stream, opened for binary writing, to receive
            the DDS data.
        :raises ValueError: If C{self.pixel_format} cannot be expressed
            as a DDS file.
        """
        # set up header and pixel data
        data = pyffi.formats.dds.DdsFormat.Data()
        header = data.header
        pixeldata = data.pixeldata

        # create header, depending on the format
        if self.pixel_format in (NifFormat.PixelFormat.PX_FMT_RGB8,
                                 NifFormat.PixelFormat.PX_FMT_RGBA8):
            # uncompressed RGB(A)
            header.flags.caps = 1
            header.flags.height = 1
            header.flags.width = 1
            header.flags.pixel_format = 1
            header.flags.mipmap_count = 1
            header.flags.linear_size = 1
            # dimensions of the largest (first) mipmap level
            header.height = self.mipmaps[0].height
            header.width = self.mipmaps[0].width
            header.linear_size = len(self.pixel_data)
            header.mipmap_count = len(self.mipmaps)
            header.pixel_format.flags.rgb = 1
            header.pixel_format.bit_count = self.bits_per_pixel
            if not self.channels:
                # masks stored directly on the pixel data block
                header.pixel_format.r_mask = self.red_mask
                header.pixel_format.g_mask = self.green_mask
                header.pixel_format.b_mask = self.blue_mask
                header.pixel_format.a_mask = self.alpha_mask
            else:
                # reconstruct the channel masks from the per-channel bit
                # counts, packing channels starting at bit 0
                bit_pos = 0
                for i, channel in enumerate(self.channels):
                    mask = (2 ** channel.bits_per_channel - 1) << bit_pos
                    if channel.type == NifFormat.ChannelType.CHNL_RED:
                        header.pixel_format.r_mask = mask
                    elif channel.type == NifFormat.ChannelType.CHNL_GREEN:
                        header.pixel_format.g_mask = mask
                    elif channel.type == NifFormat.ChannelType.CHNL_BLUE:
                        header.pixel_format.b_mask = mask
                    elif channel.type == NifFormat.ChannelType.CHNL_ALPHA:
                        header.pixel_format.a_mask = mask
                    bit_pos += channel.bits_per_channel
            header.caps_1.complex = 1
            header.caps_1.texture = 1
            header.caps_1.mipmap = 1
            if self.pixel_data:
                # used in older nif versions
                pixeldata.set_value(self.pixel_data)
            else:
                # used in newer nif versions
                pixeldata.set_value(''.join(self.pixel_data_matrix))
        elif self.pixel_format == NifFormat.PixelFormat.PX_FMT_DXT1:
            # format used in Megami Tensei: Imagine and Bully SE
            header.flags.caps = 1
            header.flags.height = 1
            header.flags.width = 1
            header.flags.pixel_format = 1
            header.flags.mipmap_count = 1
            header.flags.linear_size = 0
            header.height = self.mipmaps[0].height
            header.width = self.mipmaps[0].width
            header.linear_size = 0
            header.mipmap_count = len(self.mipmaps)
            # DXT1 is block compressed: identified by FourCC, no masks
            header.pixel_format.flags.four_c_c = 1
            header.pixel_format.four_c_c = pyffi.formats.dds.DdsFormat.FourCC.DXT1
            header.pixel_format.bit_count = 0
            header.pixel_format.r_mask = 0
            header.pixel_format.g_mask = 0
            header.pixel_format.b_mask = 0
            header.pixel_format.a_mask = 0
            header.caps_1.complex = 1
            header.caps_1.texture = 1
            header.caps_1.mipmap = 1
            if isinstance(self,
                          NifFormat.NiPersistentSrcTextureRendererData):
                # pixel data stored as nested arrays of integer byte values
                pixeldata.set_value(
                    ''.join(
                        ''.join([chr(x) for x in tex])
                        for tex in self.pixel_data))
            else:
                pixeldata.set_value(''.join(self.pixel_data_matrix))
        elif self.pixel_format in (NifFormat.PixelFormat.PX_FMT_DXT5,
                                   NifFormat.PixelFormat.PX_FMT_DXT5_ALT):
            # format used in Megami Tensei: Imagine
            header.flags.caps = 1
            header.flags.height = 1
            header.flags.width = 1
            header.flags.pixel_format = 1
            header.flags.mipmap_count = 1
            header.flags.linear_size = 0
            header.height = self.mipmaps[0].height
            header.width = self.mipmaps[0].width
            header.linear_size = 0
            header.mipmap_count = len(self.mipmaps)
            # DXT5: compressed, identified by FourCC, no masks
            header.pixel_format.flags.four_c_c = 1
            header.pixel_format.four_c_c = pyffi.formats.dds.DdsFormat.FourCC.DXT5
            header.pixel_format.bit_count = 0
            header.pixel_format.r_mask = 0
            header.pixel_format.g_mask = 0
            header.pixel_format.b_mask = 0
            header.pixel_format.a_mask = 0
            header.caps_1.complex = 1
            header.caps_1.texture = 1
            header.caps_1.mipmap = 1
            pixeldata.set_value(''.join(self.pixel_data_matrix))
        else:
            raise ValueError(
                "cannot save pixel format %i as DDS" % self.pixel_format)

        data.write(stream)
5891
class NiSkinData:
    def get_transform(self):
        """Return the skin transform (scale, rotation and translation)
        combined into a single 4x4 matrix."""
        mat = NifFormat.Matrix44()
        mat.set_scale_rotation_translation(self.scale, self.rotation, self.translation)
        return mat

    def set_transform(self, mat):
        """Set the skin transform (scale, rotation and translation)
        from a single 4x4 matrix."""
        scale, rotation, translation = mat.get_scale_rotation_translation()

        self.scale = scale

        # copy the rotation component by component so that the stored
        # matrix object is updated in place rather than replaced
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                attr = "m_%i%i" % (row, col)
                setattr(self.rotation, attr, getattr(rotation, attr))

        self.translation.x = translation.x
        self.translation.y = translation.y
        self.translation.z = translation.z

    def apply_scale(self, scale):
        """Apply scale factor on data.

        >>> from pyffi.formats.nif import NifFormat
        >>> id44 = NifFormat.Matrix44()
        >>> id44.set_identity()
        >>> skelroot = NifFormat.NiNode()
        >>> skelroot.name = 'Scene Root'
        >>> skelroot.set_transform(id44)
        >>> bone1 = NifFormat.NiNode()
        >>> bone1.name = 'bone1'
        >>> bone1.set_transform(id44)
        >>> bone1.translation.x = 10
        >>> skelroot.add_child(bone1)
        >>> geom = NifFormat.NiTriShape()
        >>> geom.set_transform(id44)
        >>> skelroot.add_child(geom)
        >>> skininst = NifFormat.NiSkinInstance()
        >>> geom.skin_instance = skininst
        >>> skininst.skeleton_root = skelroot
        >>> skindata = NifFormat.NiSkinData()
        >>> skininst.data = skindata
        >>> skindata.set_transform(id44)
        >>> geom.add_bone(bone1, {})
        >>> geom.update_bind_position()
        >>> bone1.translation.x
        10.0
        >>> skindata.bone_list[0].translation.x
        -10.0
        >>> import pyffi.spells.nif.fix
        >>> import pyffi.spells.nif
        >>> data = NifFormat.Data()
        >>> data.roots = [skelroot]
        >>> toaster = pyffi.spells.nif.NifToaster()
        >>> toaster.scale = 0.1
        >>> pyffi.spells.nif.fix.SpellScale(data=data, toaster=toaster).recurse()
        pyffi.toaster:INFO:--- fix_scale ---
        pyffi.toaster:INFO:  scaling by factor 0.100000
        pyffi.toaster:INFO:  ~~~ NiNode [Scene Root] ~~~
        pyffi.toaster:INFO:    ~~~ NiNode [bone1] ~~~
        pyffi.toaster:INFO:    ~~~ NiTriShape [] ~~~
        pyffi.toaster:INFO:      ~~~ NiSkinInstance [] ~~~
        pyffi.toaster:INFO:        ~~~ NiSkinData [] ~~~
        >>> bone1.translation.x
        1.0
        >>> skindata.bone_list[0].translation.x
        -1.0
        """
        def _scale_in_place(vec):
            # multiply each coordinate of a vector in place
            vec.x *= scale
            vec.y *= scale
            vec.z *= scale

        _scale_in_place(self.translation)

        for bonedata in self.bone_list:
            _scale_in_place(bonedata.translation)
            _scale_in_place(bonedata.bounding_sphere_offset)
            bonedata.bounding_sphere_radius *= scale
5980
class NiTransformInterpolator:
    def apply_scale(self, scale):
        """Apply scale factor <scale> on data."""
        # scale each translation component in place
        trans = self.translation
        trans.x *= scale
        trans.y *= scale
        trans.z *= scale
5988
class NiTriBasedGeomData:
    def is_interchangeable(self, other):
        """Heuristically checks if two NiTriBasedGeomData blocks describe
        the same geometry, that is, if they can be used interchangeably in
        a nif file without affecting the rendering. The check is not fool
        proof but has shown to work in most practical cases.

        :param other: Another geometry data block.
        :type other: L{NifFormat.NiTriBasedGeomData} (if it has another type
            then the function will always return ``False``)
        :return: ``True`` if the geometries are equivalent, ``False`` otherwise.
        """
        # the same object is trivially interchangeable with itself
        if self is other:
            return True

        # both blocks must be tri based geometry data of the same class
        if not isinstance(other, NifFormat.NiTriBasedGeomData):
            return False
        if (not isinstance(self, other.__class__)
                or not isinstance(other, self.__class__)):
            return False

        # cheap scalar attribute comparisons first
        for attribute in (
            "num_vertices", "keep_flags", "compress_flags", "has_vertices",
            "num_uv_sets", "has_normals", "center", "radius",
            "has_vertex_colors", "has_uv", "consistency_flags"):
            if getattr(self, attribute) != getattr(other, attribute):
                return False

        # compare vertices via their hashes (this covers uvs, vertex
        # colors and normals); each side must be contained in the other
        hashes_self = list(self.get_vertex_hash_generator())
        hashes_other = list(other.get_vertex_hash_generator())
        if any(hsh not in hashes_other for hsh in hashes_self):
            return False
        if any(hsh not in hashes_self for hsh in hashes_other):
            return False

        # compare the triangle lists, expressed in vertex hashes so the
        # comparison does not depend on vertex ordering
        tris_self = [tuple(hashes_self[i] for i in tri)
                     for tri in self.get_triangles()]
        tris_other = [tuple(hashes_other[i] for i in tri)
                      for tri in other.get_triangles()]
        if any(tri not in tris_other for tri in tris_self):
            return False
        if any(tri not in tris_self for tri in tris_other):
            return False

        # looks pretty identical!
        return True

    def get_triangle_indices(self, triangles):
        """Yield list of triangle indices (relative to
        self.get_triangles()) of given triangles. Degenerate triangles in
        the list are assigned index ``None``.

        >>> from pyffi.formats.nif import NifFormat
        >>> geomdata = NifFormat.NiTriShapeData()
        >>> geomdata.set_triangles([(0,1,2),(1,2,3),(2,3,4)])
        >>> list(geomdata.get_triangle_indices([(1,2,3)]))
        [1]
        >>> list(geomdata.get_triangle_indices([(3,1,2)]))
        [1]
        >>> list(geomdata.get_triangle_indices([(2,3,1)]))
        [1]
        >>> list(geomdata.get_triangle_indices([(1,2,0),(4,2,3)]))
        [0, 2]
        >>> list(geomdata.get_triangle_indices([(0,0,0),(4,2,3)]))
        [None, 2]
        >>> list(geomdata.get_triangle_indices([(0,3,4),(4,2,3)])) # doctest: +ELLIPSIS
        Traceback (most recent call last):
            ...
        ValueError: ...

        :param triangles: An iterable of triangles to check.
        :type triangles: iterator or list of tuples of three ints
        """
        def canonical_hash(triangle):
            """Hash of a triangle, invariant under cyclic rotation;
            returns ``None`` for degenerate triangles."""
            a, b, c = triangle[0], triangle[1], triangle[2]
            if a < b and a < c:
                return hash((a, b, c))
            if b < a and b < c:
                return hash((b, c, a))
            if c < a and c < b:
                return hash((c, a, b))
            # no strictly smallest corner: at least two corners coincide
            return None

        # hashes of this geometry's own triangles
        own_hashes = [canonical_hash(triangle)
                      for triangle in self.get_triangles()]

        for triangle in triangles:
            key = canonical_hash(triangle)
            # .index raises ValueError if the triangle is not present
            yield None if key is None else own_hashes.index(key)
6095
6096 - class NiTriBasedGeom:
def get_tangent_space(self):
    """Return iterator over normal, tangent, bitangent vectors.
    If the block has no tangent space, then returns None.

    :return: An iterator of (normal, tangent, bitangent) triples, one per
        vertex; an empty tuple if the geometry has no vertices; ``None``
        if it has no normals or no tangent space data at all.
    :raises ValueError: If Oblivion-style extra data is present but has
        the wrong size for the vertex count.
    """

    def bytes2vectors(data, pos, num):
        # decode num consecutive 12-byte chunks of data (starting at
        # pos) into Vector3 objects
        for i in xrange(num):
            # data[pos:pos+12] is not really well implemented, so do this
            vecdata = ''.join(data[j] for j in xrange(pos, pos + 12))
            vec = NifFormat.Vector3()
            # XXX _byte_order! assuming little endian
            vec.x, vec.y, vec.z = struct.unpack('<fff', vecdata)
            yield vec
            pos += 12


    if self.data.num_vertices == 0:
        # no vertices: empty tangent space
        return ()

    if not self.data.normals:
        #raise ValueError('geometry has no normals')
        return None

    if (not self.data.tangents) or (not self.data.bitangents):
        # no tangents and bitangents at the usual location
        # perhaps there is Oblivion style data?
        # (for-else: 'extra' leaks out of the loop when found via break)
        for extra in self.get_extra_datas():
            if isinstance(extra, NifFormat.NiBinaryExtraData):
                if extra.name == 'Tangent space (binormal & tangent vectors)':
                    break
        else:
            #raise ValueError('geometry has no tangents')
            return None
        # the extra data holds num_vertices tangents followed by
        # num_vertices bitangents, 12 bytes (3 floats) each
        if 24 * self.data.num_vertices != len(extra.binary_data):
            raise ValueError(
                'tangent space data has invalid size, expected %i bytes but got %i'
                % (24 * self.data.num_vertices, len(extra.binary_data)))
        tangents = bytes2vectors(extra.binary_data,
                                 0,
                                 self.data.num_vertices)
        bitangents = bytes2vectors(extra.binary_data,
                                   12 * self.data.num_vertices,
                                   self.data.num_vertices)
    else:
        # Fallout 3 style: tangent space stored directly on the data block
        tangents = self.data.tangents
        bitangents = self.data.bitangents

    return izip(self.data.normals, tangents, bitangents)
6145
def update_tangent_space(
    self, as_extra=None,
    vertexprecision=3, normalprecision=3):
    """Recalculate tangent space data.

    :param as_extra: Whether to store the tangent space data as extra data
        (as in Oblivion) or not (as in Fallout 3). If not set, switches to
        Oblivion if an extra data block is found, otherwise does default.
        Set it to override this detection (for example when using this
        function to create tangent space data) and force behaviour.
    :param vertexprecision: Rounding precision for vertex coordinates
        when identifying coinciding vertices across uv seams.
    :param normalprecision: Rounding precision for normals when
        identifying coinciding vertices across uv seams.
    :raises ValueError: If C{self.data} is not a NiTriBasedGeomData.
    """
    # check that self.data exists and is valid
    if not isinstance(self.data, NifFormat.NiTriBasedGeomData):
        raise ValueError(
            'cannot update tangent space of a geometry with %s data'
            %(self.data.__class__ if self.data else 'no'))

    verts = self.data.vertices
    norms = self.data.normals
    if len(self.data.uv_sets) > 0:
        uvs = self.data.uv_sets[0]
    else:
        # no uv sets so no tangent space
        # we clear the tangents space flag just
        # happens in Fallout NV
        # meshes/architecture/bouldercity/arcadeendl.nif
        # (see issue #3218751)
        self.data.num_uv_sets &= ~4096
        self.data.bs_num_uv_sets &= ~4096
        return

    # check that shape has norms and uvs
    if len(uvs) == 0 or len(norms) == 0: return

    # identify identical (vertex, normal) pairs to avoid issues along
    # uv seams due to vertex duplication
    # implementation note: uvprecision and vcolprecision 0
    # should be enough, but use -2 just to be really sure
    # that this is ignored
    v_hash_map = list(
        self.data.get_vertex_hash_generator(
            vertexprecision=vertexprecision,
            normalprecision=normalprecision,
            uvprecision=-2,
            vcolprecision=-2))

    # tangent and binormal dictionaries by vertex hash
    bin = dict((h, NifFormat.Vector3()) for h in v_hash_map)
    tan = dict((h, NifFormat.Vector3()) for h in v_hash_map)

    # calculate tangents and binormals from vertex and texture coordinates
    for t1, t2, t3 in self.data.get_triangles():
        # find hash values
        h1 = v_hash_map[t1]
        h2 = v_hash_map[t2]
        h3 = v_hash_map[t3]
        # skip degenerate triangles
        if h1 == h2 or h2 == h3 or h3 == h1:
            continue

        v_1 = verts[t1]
        v_2 = verts[t2]
        v_3 = verts[t3]
        w1 = uvs[t1]
        w2 = uvs[t2]
        w3 = uvs[t3]
        # edge vectors in object space and in texture space
        v_2v_1 = v_2 - v_1
        v_3v_1 = v_3 - v_1
        w2w1 = w2 - w1
        w3w1 = w3 - w1

        # surface of triangle in texture space
        r = w2w1.u * w3w1.v - w3w1.u * w2w1.v

        # sign of surface
        r_sign = (1 if r >= 0 else -1)

        # contribution of this triangle to tangents and binormals
        sdir = NifFormat.Vector3()
        sdir.x = (w3w1.v * v_2v_1.x - w2w1.v * v_3v_1.x) * r_sign
        sdir.y = (w3w1.v * v_2v_1.y - w2w1.v * v_3v_1.y) * r_sign
        sdir.z = (w3w1.v * v_2v_1.z - w2w1.v * v_3v_1.z) * r_sign
        try:
            sdir.normalize()
        except ZeroDivisionError: # catches zero vector
            continue # skip triangle
        except ValueError: # catches invalid data
            continue # skip triangle

        tdir = NifFormat.Vector3()
        tdir.x = (w2w1.u * v_3v_1.x - w3w1.u * v_2v_1.x) * r_sign
        tdir.y = (w2w1.u * v_3v_1.y - w3w1.u * v_2v_1.y) * r_sign
        tdir.z = (w2w1.u * v_3v_1.z - w3w1.u * v_2v_1.z) * r_sign
        try:
            tdir.normalize()
        except ZeroDivisionError: # catches zero vector
            continue # skip triangle
        except ValueError: # catches invalid data
            continue # skip triangle

        # vector combination algorithm could possibly be improved
        for h in [h1, h2, h3]:
            # addition inlined for speed
            tanh = tan[h]
            tanh.x += tdir.x
            tanh.y += tdir.y
            tanh.z += tdir.z
            binh = bin[h]
            binh.x += sdir.x
            binh.y += sdir.y
            binh.z += sdir.z

    # unit vectors used as fallbacks for degenerate cases below
    xvec = NifFormat.Vector3()
    xvec.x = 1.0
    xvec.y = 0.0
    xvec.z = 0.0
    yvec = NifFormat.Vector3()
    yvec.x = 0.0
    yvec.y = 1.0
    yvec.z = 0.0
    for n, h in izip(norms, v_hash_map):
        binh = bin[h]
        tanh = tan[h]
        try:
            n.normalize()
        except (ValueError, ZeroDivisionError):
            # this happens if the normal has NAN values or is zero
            # just pick something in that case
            n = yvec
        try:
            # turn n, bin, tan into a base via Gram-Schmidt
            # bin[h] -= n * (n * bin[h])
            # inlined for speed
            scalar = n * binh
            binh.x -= n.x * scalar
            binh.y -= n.y * scalar
            binh.z -= n.z * scalar
            binh.normalize()

            # tan[h] -= n * (n * tan[h])
            # tan[h] -= bin[h] * (bin[h] * tan[h])
            # inlined for speed
            scalar = n * tanh
            tanh.x -= n.x * scalar
            tanh.y -= n.y * scalar
            tanh.z -= n.z * scalar

            scalar = binh * tanh
            tanh.x -= binh.x * scalar
            tanh.y -= binh.y * scalar
            tanh.z -= binh.z * scalar
            tanh.normalize()
        except ZeroDivisionError:
            # insuffient data to set tangent space for this vertex
            # in that case pick a space
            # NOTE(review): the fallback binh/tanh computed below rebind
            # the local names only and are never stored back into
            # bin[h]/tan[h]; the dict entries keep their pre-fallback
            # values -- verify this is intended
            binh = xvec.crossproduct(n)
            try:
                binh.normalize()
            except ZeroDivisionError:
                binh = yvec.crossproduct(n)
                binh.normalize() # should work now
            tanh = n.crossproduct(binh)

    # tangent and binormal lists by vertex index
    tan = [tan[h] for h in v_hash_map]
    bin = [bin[h] for h in v_hash_map]

    # find possible extra data block
    # (for-else: 'extra' is the block found via break, or None)
    for extra in self.get_extra_datas():
        if isinstance(extra, NifFormat.NiBinaryExtraData):
            if extra.name == 'Tangent space (binormal & tangent vectors)':
                break
    else:
        extra = None

    # if autodetection is on, do as_extra only if an extra data block is found
    if as_extra is None:
        if extra:
            as_extra = True
        else:
            as_extra = False

    if as_extra:
        # if tangent space extra data already exists, use it
        if not extra:
            # otherwise, create a new block and link it
            extra = NifFormat.NiBinaryExtraData()
            extra.name = 'Tangent space (binormal & tangent vectors)'
            self.add_extra_data(extra)

        # write the data
        binarydata = ""
        for vec in tan + bin:
            # XXX _byte_order!! assuming little endian
            binarydata += struct.pack('<fff', vec.x, vec.y, vec.z)
        extra.binary_data = binarydata
    else:
        # set tangent space flag
        # XXX used to be 61440
        # XXX from Sid Meier's Railroad & Fallout 3 nifs, 4096 is
        # XXX sufficient?
        self.data.num_uv_sets |= 4096
        self.data.bs_num_uv_sets |= 4096
        self.data.tangents.update_size()
        self.data.bitangents.update_size()
        for vec, data_tan in izip(tan, self.data.tangents):
            data_tan.x = vec.x
            data_tan.y = vec.y
            data_tan.z = vec.z
        for vec, data_bitan in izip(bin, self.data.bitangents):
            data_bitan.x = vec.x
            data_bitan.y = vec.y
            data_bitan.z = vec.z
6359 6360 # ported from nifskope/skeleton.cpp:spSkinPartition
6361 - def update_skin_partition(self, 6362 maxbonesperpartition=4, maxbonespervertex=4, 6363 verbose=0, stripify=True, stitchstrips=False, 6364 padbones=False, 6365 triangles=None, trianglepartmap=None, 6366 maximize_bone_sharing=False):
6367 """Recalculate skin partition data. 6368 6369 :deprecated: Do not use the verbose argument. 6370 :param maxbonesperpartition: Maximum number of bones in each partition. 6371 The num_bones field will not exceed this number. 6372 :param maxbonespervertex: Maximum number of bones per vertex. 6373 The num_weights_per_vertex field will be exactly equal to this number. 6374 :param verbose: Ignored, and deprecated. Set pyffi's log level instead. 6375 :param stripify: If true, stripify the partitions, otherwise use triangles. 6376 :param stitchstrips: If stripify is true, then set this to true to stitch 6377 the strips. 6378 :param padbones: Enforces the numbones field to be equal to 6379 maxbonesperpartition. Also ensures that the bone indices are unique 6380 and sorted, per vertex. Raises an exception if maxbonespervertex 6381 is not equal to maxbonesperpartition (in that case bone indices cannot 6382 be unique and sorted). This options is required for Freedom Force vs. 6383 the 3rd Reich skin partitions. 6384 :param triangles: The triangles of the partition (if not specified, then 6385 this defaults to C{self.data.get_triangles()}. 6386 :param trianglepartmap: Maps each triangle to a partition index. Faces with 6387 different indices will never appear in the same partition. If the skin 6388 instance is a BSDismemberSkinInstance, then these indices are used as 6389 body part types, and the partitions in the BSDismemberSkinInstance are 6390 updated accordingly. Note that the faces are counted relative to 6391 L{triangles}. 6392 :param maximize_bone_sharing: Maximize bone sharing between partitions. 6393 This option is useful for Fallout 3. 
6394 """ 6395 logger = logging.getLogger("pyffi.nif.nitribasedgeom") 6396 6397 # if trianglepartmap not specified, map everything to index 0 6398 if trianglepartmap is None: 6399 trianglepartmap = repeat(0) 6400 6401 # shortcuts relevant blocks 6402 if not self.skin_instance: 6403 # no skin, nothing to do 6404 return 6405 self._validate_skin() 6406 geomdata = self.data 6407 skininst = self.skin_instance 6408 skindata = skininst.data 6409 6410 # get skindata vertex weights 6411 logger.debug("Getting vertex weights.") 6412 weights = self.get_vertex_weights() 6413 6414 # count minimum and maximum number of bones per vertex 6415 minbones = min(len(weight) for weight in weights) 6416 maxbones = max(len(weight) for weight in weights) 6417 if minbones <= 0: 6418 noweights = [v for v, weight in enumerate(weights) 6419 if not weight] 6420 #raise ValueError( 6421 logger.warn( 6422 'bad NiSkinData: some vertices have no weights %s' 6423 % noweights) 6424 logger.info("Counted minimum of %i and maximum of %i bones per vertex" 6425 % (minbones, maxbones)) 6426 6427 # reduce bone influences to meet maximum number of bones per vertex 6428 logger.info("Imposing maximum of %i bones per vertex." % maxbonespervertex) 6429 lostweight = 0.0 6430 for weight in weights: 6431 if len(weight) > maxbonespervertex: 6432 # delete bone influences with least weight 6433 weight.sort(key=lambda x: x[1], reverse=True) # sort by weight 6434 # save lost weight to return to user 6435 lostweight = max( 6436 lostweight, max( 6437 [x[1] for x in weight[maxbonespervertex:]])) 6438 del weight[maxbonespervertex:] # only keep first elements 6439 # normalize 6440 totalweight = sum([x[1] for x in weight]) # sum of all weights 6441 for x in weight: x[1] /= totalweight 6442 maxbones = maxbonespervertex 6443 # sort by again by bone (relied on later when matching vertices) 6444 weight.sort(key=lambda x: x[0]) 6445 6446 # reduce bone influences to meet maximum number of bones per partition 6447 # (i.e. 
maximum number of bones per triangle) 6448 logger.info( 6449 "Imposing maximum of %i bones per triangle (and hence, per partition)." 6450 % maxbonesperpartition) 6451 6452 if triangles is None: 6453 triangles = geomdata.get_triangles() 6454 6455 for tri in triangles: 6456 while True: 6457 # find the bones influencing this triangle 6458 tribones = [] 6459 for t in tri: 6460 tribones.extend([bonenum for bonenum, boneweight in weights[t]]) 6461 tribones = set(tribones) 6462 # target met? 6463 if len(tribones) <= maxbonesperpartition: 6464 break 6465 # no, need to remove a bone 6466 6467 # sum weights for each bone to find the one that least influences 6468 # this triangle 6469 tribonesweights = {} 6470 for bonenum in tribones: tribonesweights[bonenum] = 0.0 6471 nono = set() # bones with weight 1 cannot be removed 6472 for skinweights in [weights[t] for t in tri]: 6473 # skinweights[0] is the first skinweight influencing vertex t 6474 # and skinweights[0][0] is the bone number of that bone 6475 if len(skinweights) == 1: nono.add(skinweights[0][0]) 6476 for bonenum, boneweight in skinweights: 6477 tribonesweights[bonenum] += boneweight 6478 6479 # select a bone to remove 6480 # first find bones we can remove 6481 6482 # restrict to bones not in the nono set 6483 tribonesweights = [ 6484 x for x in tribonesweights.items() if x[0] not in nono] 6485 if not tribonesweights: 6486 raise ValueError( 6487 "cannot remove anymore bones in this skin; " 6488 "increase maxbonesperpartition and try again") 6489 # sort by vertex weight sum the last element of this list is now a 6490 # candidate for removal 6491 tribonesweights.sort(key=lambda x: x[1], reverse=True) 6492 minbone = tribonesweights[-1][0] 6493 6494 # remove minbone from all vertices of this triangle and from all 6495 # matching vertices 6496 for t in tri: 6497 for tt in [t]: #match[t]: 6498 # remove bone 6499 weight = weights[tt] 6500 for i, (bonenum, boneweight) in enumerate(weight): 6501 if bonenum == minbone: 6502 # 
save lost weight to return to user 6503 lostweight = max(lostweight, boneweight) 6504 del weight[i] 6505 break 6506 else: 6507 continue 6508 # normalize 6509 totalweight = sum([x[1] for x in weight]) 6510 for x in weight: 6511 x[1] /= totalweight 6512 6513 # split triangles into partitions 6514 logger.info("Creating partitions") 6515 parts = [] 6516 # keep creating partitions as long as there are triangles left 6517 while triangles: 6518 # create a partition 6519 part = [set(), [], None] # bones, triangles, partition index 6520 usedverts = set() 6521 addtriangles = True 6522 # keep adding triangles to it as long as the flag is set 6523 while addtriangles: 6524 # newtriangles is a list of triangles that have not been added to 6525 # the partition, similar for newtrianglepartmap 6526 newtriangles = [] 6527 newtrianglepartmap = [] 6528 for tri, partindex in izip(triangles, trianglepartmap): 6529 # find the bones influencing this triangle 6530 tribones = [] 6531 for t in tri: 6532 tribones.extend([ 6533 bonenum for bonenum, boneweight in weights[t]]) 6534 tribones = set(tribones) 6535 # if part has no bones, 6536 # or if part has all bones of tribones and index coincides 6537 # then add this triangle to this part 6538 if ((not part[0]) 6539 or ((part[0] >= tribones) and (part[2] == partindex))): 6540 part[0] |= tribones 6541 part[1].append(tri) 6542 usedverts |= set(tri) 6543 # if part was empty, assign it the index 6544 if part[2] is None: 6545 part[2] = partindex 6546 else: 6547 newtriangles.append(tri) 6548 newtrianglepartmap.append(partindex) 6549 triangles = newtriangles 6550 trianglepartmap = newtrianglepartmap 6551 6552 # if we have room left in the partition 6553 # then add adjacent triangles 6554 addtriangles = False 6555 newtriangles = [] 6556 newtrianglepartmap = [] 6557 if len(part[0]) < maxbonesperpartition: 6558 for tri, partindex in izip(triangles, trianglepartmap): 6559 # if triangle is adjacent, and has same index 6560 # then check if it can be added 
to the partition 6561 if (usedverts & set(tri)) and (part[2] == partindex): 6562 # find the bones influencing this triangle 6563 tribones = [] 6564 for t in tri: 6565 tribones.extend([ 6566 bonenum for bonenum, boneweight in weights[t]]) 6567 tribones = set(tribones) 6568 # and check if we exceed the maximum number of allowed 6569 # bones 6570 if len(part[0] | tribones) <= maxbonesperpartition: 6571 part[0] |= tribones 6572 part[1].append(tri) 6573 usedverts |= set(tri) 6574 # signal another try in adding triangles to 6575 # the partition 6576 addtriangles = True 6577 else: 6578 newtriangles.append(tri) 6579 newtrianglepartmap.append(partindex) 6580 else: 6581 newtriangles.append(tri) 6582 newtrianglepartmap.append(partindex) 6583 triangles = newtriangles 6584 trianglepartmap = newtrianglepartmap 6585 6586 parts.append(part) 6587 6588 logger.info("Created %i small partitions." % len(parts)) 6589 6590 # merge all partitions 6591 logger.info("Merging partitions.") 6592 merged = True # signals success, in which case do another run 6593 while merged: 6594 merged = False 6595 # newparts is to contain the updated merged partitions as we go 6596 newparts = [] 6597 # addedparts is the set of all partitions from parts that have been 6598 # added to newparts 6599 addedparts = set() 6600 # try all combinations 6601 for a, parta in enumerate(parts): 6602 if a in addedparts: 6603 continue 6604 newparts.append(parta) 6605 addedparts.add(a) 6606 for b, partb in enumerate(parts): 6607 if b <= a: 6608 continue 6609 if b in addedparts: 6610 continue 6611 # if partition indices are the same, and bone limit is not 6612 # exceeded, merge them 6613 if ((parta[2] == partb[2]) 6614 and (len(parta[0] | partb[0]) <= maxbonesperpartition)): 6615 parta[0] |= partb[0] 6616 parta[1] += partb[1] 6617 addedparts.add(b) 6618 merged = True # signal another try in merging partitions 6619 # update partitions to the merged partitions 6620 parts = newparts 6621 6622 # write the NiSkinPartition 6623 
logger.info("Skin has %i partitions." % len(parts)) 6624 6625 # if skin partition already exists, use it 6626 if skindata.skin_partition != None: 6627 skinpart = skindata.skin_partition 6628 skininst.skin_partition = skinpart 6629 elif skininst.skin_partition != None: 6630 skinpart = skininst.skin_partition 6631 skindata.skin_partition = skinpart 6632 else: 6633 # otherwise, create a new block and link it 6634 skinpart = NifFormat.NiSkinPartition() 6635 skindata.skin_partition = skinpart 6636 skininst.skin_partition = skinpart 6637 6638 # set number of partitions 6639 skinpart.num_skin_partition_blocks = len(parts) 6640 skinpart.skin_partition_blocks.update_size() 6641 6642 # maximize bone sharing, if requested 6643 if maximize_bone_sharing: 6644 logger.info("Maximizing shared bones.") 6645 # new list of partitions, sorted to maximize bone sharing 6646 newparts = [] 6647 # as long as there are parts to add 6648 while parts: 6649 # current set of partitions with shared bones 6650 # starts a new set of partitions with shared bones 6651 sharedparts = [parts.pop()] 6652 sharedboneset = sharedparts[0][0] 6653 # go over all other partitions, and try to add them with 6654 # shared bones 6655 oldparts = parts[:] 6656 parts = [] 6657 for otherpart in oldparts: 6658 # check if bones can be added 6659 if len(sharedboneset | otherpart[0]) <= maxbonesperpartition: 6660 # ok, we can share bones! 
6661 # update set of shared bones 6662 sharedboneset |= otherpart[0] 6663 # add this other partition to list of shared parts 6664 sharedparts.append(otherpart) 6665 # update bone set in all shared parts 6666 for sharedpart in sharedparts: 6667 sharedpart[0] = sharedboneset 6668 else: 6669 # not added to sharedparts, 6670 # so we must keep it for the next iteration 6671 parts.append(otherpart) 6672 # update list of partitions 6673 newparts.extend(sharedparts) 6674 6675 # store update 6676 parts = newparts 6677 6678 # for Fallout 3, set dismember partition indices 6679 if isinstance(skininst, NifFormat.BSDismemberSkinInstance): 6680 skininst.num_partitions = len(parts) 6681 skininst.partitions.update_size() 6682 lastpart = None 6683 for bodypart, part in izip(skininst.partitions, parts): 6684 bodypart.body_part = part[2] 6685 if (lastpart is None) or (lastpart[0] != part[0]): 6686 # start new bone set, if bones are not shared 6687 bodypart.part_flag.start_new_boneset = 1 6688 else: 6689 # do not start new bone set 6690 bodypart.part_flag.start_new_boneset = 0 6691 # caps are invisible 6692 bodypart.part_flag.editor_visible = (part[2] < 100 6693 or part[2] >= 1000) 6694 # store part for next iteration 6695 lastpart = part 6696 6697 for skinpartblock, part in zip(skinpart.skin_partition_blocks, parts): 6698 # get sorted list of bones 6699 bones = sorted(list(part[0])) 6700 triangles = part[1] 6701 logger.info("Optimizing triangle ordering in partition %i" 6702 % parts.index(part)) 6703 # optimize triangles for vertex cache and calculate strips 6704 triangles = pyffi.utils.vertex_cache.get_cache_optimized_triangles( 6705 triangles) 6706 strips = pyffi.utils.vertex_cache.stable_stripify( 6707 triangles, stitchstrips=stitchstrips) 6708 triangles_size = 3 * len(triangles) 6709 strips_size = len(strips) + sum(len(strip) for strip in strips) 6710 vertices = [] 6711 # decide whether to use strip or triangles as primitive 6712 if stripify is None: 6713 stripifyblock = ( 6714 
strips_size < triangles_size 6715 and all(len(strip) < 65536 for strip in strips)) 6716 else: 6717 stripifyblock = stripify 6718 if stripifyblock: 6719 # stripify the triangles 6720 # also update triangle list 6721 numtriangles = 0 6722 # calculate number of triangles and get sorted 6723 # list of vertices 6724 # for optimal performance, vertices must be sorted 6725 # by strip 6726 for strip in strips: 6727 numtriangles += len(strip) - 2 6728 for t in strip: 6729 if t not in vertices: 6730 vertices.append(t) 6731 else: 6732 numtriangles = len(triangles) 6733 # get sorted list of vertices 6734 # for optimal performance, vertices must be sorted 6735 # by triangle 6736 for tri in triangles: 6737 for t in tri: 6738 if t not in vertices: 6739 vertices.append(t) 6740 # set all the data 6741 skinpartblock.num_vertices = len(vertices) 6742 skinpartblock.num_triangles = numtriangles 6743 if not padbones: 6744 skinpartblock.num_bones = len(bones) 6745 else: 6746 if maxbonesperpartition != maxbonespervertex: 6747 raise ValueError( 6748 "when padding bones maxbonesperpartition must be " 6749 "equal to maxbonespervertex") 6750 # freedom force vs. 
the 3rd reich needs exactly 4 bones per 6751 # partition on every partition block 6752 skinpartblock.num_bones = maxbonesperpartition 6753 if stripifyblock: 6754 skinpartblock.num_strips = len(strips) 6755 else: 6756 skinpartblock.num_strips = 0 6757 # maxbones would be enough as num_weights_per_vertex but the Gamebryo 6758 # engine doesn't like that, it seems to want exactly 4 even if there 6759 # are fewer 6760 skinpartblock.num_weights_per_vertex = maxbonespervertex 6761 skinpartblock.bones.update_size() 6762 for i, bonenum in enumerate(bones): 6763 skinpartblock.bones[i] = bonenum 6764 for i in xrange(len(bones), skinpartblock.num_bones): 6765 skinpartblock.bones[i] = 0 # dummy bone slots refer to first bone 6766 skinpartblock.has_vertex_map = True 6767 skinpartblock.vertex_map.update_size() 6768 for i, v in enumerate(vertices): 6769 skinpartblock.vertex_map[i] = v 6770 skinpartblock.has_vertex_weights = True 6771 skinpartblock.vertex_weights.update_size() 6772 for i, v in enumerate(vertices): 6773 for j in xrange(skinpartblock.num_weights_per_vertex): 6774 if j < len(weights[v]): 6775 skinpartblock.vertex_weights[i][j] = weights[v][j][1] 6776 else: 6777 skinpartblock.vertex_weights[i][j] = 0.0 6778 if stripifyblock: 6779 skinpartblock.has_faces = True 6780 skinpartblock.strip_lengths.update_size() 6781 for i, strip in enumerate(strips): 6782 skinpartblock.strip_lengths[i] = len(strip) 6783 skinpartblock.strips.update_size() 6784 for i, strip in enumerate(strips): 6785 for j, v in enumerate(strip): 6786 skinpartblock.strips[i][j] = vertices.index(v) 6787 else: 6788 skinpartblock.has_faces = True 6789 # clear strip lengths array 6790 skinpartblock.strip_lengths.update_size() 6791 # clear strips array 6792 skinpartblock.strips.update_size() 6793 skinpartblock.triangles.update_size() 6794 for i, (v_1,v_2,v_3) in enumerate(triangles): 6795 skinpartblock.triangles[i].v_1 = vertices.index(v_1) 6796 skinpartblock.triangles[i].v_2 = vertices.index(v_2) 6797 
skinpartblock.triangles[i].v_3 = vertices.index(v_3) 6798 skinpartblock.has_bone_indices = True 6799 skinpartblock.bone_indices.update_size() 6800 for i, v in enumerate(vertices): 6801 # the boneindices set keeps track of indices that have not been 6802 # used yet 6803 boneindices = set(range(skinpartblock.num_bones)) 6804 for j in xrange(len(weights[v])): 6805 skinpartblock.bone_indices[i][j] = bones.index(weights[v][j][0]) 6806 boneindices.remove(skinpartblock.bone_indices[i][j]) 6807 for j in xrange(len(weights[v]),skinpartblock.num_weights_per_vertex): 6808 if padbones: 6809 # if padbones is True then we have enforced 6810 # num_bones == num_weights_per_vertex so this will not trigger 6811 # a KeyError 6812 skinpartblock.bone_indices[i][j] = boneindices.pop() 6813 else: 6814 skinpartblock.bone_indices[i][j] = 0 6815 6816 # sort weights 6817 for i, v in enumerate(vertices): 6818 vweights = [] 6819 for j in xrange(skinpartblock.num_weights_per_vertex): 6820 vweights.append([ 6821 skinpartblock.bone_indices[i][j], 6822 skinpartblock.vertex_weights[i][j]]) 6823 if padbones: 6824 # by bone index (for ffvt3r) 6825 vweights.sort(key=lambda w: w[0]) 6826 else: 6827 # by weight (for fallout 3, largest weight first) 6828 vweights.sort(key=lambda w: -w[1]) 6829 for j in xrange(skinpartblock.num_weights_per_vertex): 6830 skinpartblock.bone_indices[i][j] = vweights[j][0] 6831 skinpartblock.vertex_weights[i][j] = vweights[j][1] 6832 6833 return lostweight
6834 6835 # ported from nifskope/skeleton.cpp:spFixBoneBounds
def update_skin_center_radius(self):
    """Update centers and radii of all skin data fields.

    For each bone in the skin data, collects the vertices influenced by
    that bone, computes their axis-aligned bounding box, stores the box
    center (transformed into bone coordinates) as the bounding sphere
    offset, and the largest center-to-vertex distance as the radius.

    Does nothing if the geometry has no skin instance.
    """
    # shortcuts to the relevant blocks
    if not self.skin_instance:
        return  # no skin, nothing to do
    self._validate_skin()
    geomdata = self.data
    skininst = self.skin_instance
    skindata = skininst.data

    verts = geomdata.vertices

    for skindatablock in skindata.bone_list:
        # find all vertices influenced by this bone
        boneverts = [verts[skinweight.index]
                     for skinweight in skindatablock.vertex_weights]

        # guard: a bone without any vertex weights has no bound to
        # compute; the min()/max() calls below would raise ValueError
        # on an empty sequence, so skip such bones
        if not boneverts:
            continue

        # find bounding box of these vertices
        low = NifFormat.Vector3()
        low.x = min(v.x for v in boneverts)
        low.y = min(v.y for v in boneverts)
        low.z = min(v.z for v in boneverts)

        high = NifFormat.Vector3()
        high.x = max(v.x for v in boneverts)
        high.y = max(v.y for v in boneverts)
        high.z = max(v.z for v in boneverts)

        # center is in the center of the bounding box
        center = (low + high) * 0.5

        # radius is the largest distance from the center
        r2 = 0.0
        for v in boneverts:
            d = center - v
            r2 = max(r2, d.x * d.x + d.y * d.y + d.z * d.z)
        radius = r2 ** 0.5

        # transform center in proper coordinates (radius remains unaffected)
        center *= skindatablock.get_transform()

        # save data
        skindatablock.bounding_sphere_offset.x = center.x
        skindatablock.bounding_sphere_offset.y = center.y
        skindatablock.bounding_sphere_offset.z = center.z
        skindatablock.bounding_sphere_radius = radius
6882
def get_interchangeable_tri_shape(self, triangles=None):
    """Return a NiTriShape block that is geometrically
    interchangeable with this shape.

    :param triangles: Optional triangle list to use instead of the
        triangles of the original shape.
    """
    # reduce the shape to the common NiTriBasedGeom interface, then
    # promote that copy to a NiTriShape
    base_shape = NifFormat.NiTriBasedGeom().deepcopy(self)
    tri_shape = NifFormat.NiTriShape().deepcopy(base_shape)
    # same two-step copy for the geometry data (this drops any strips)
    base_data = NifFormat.NiTriBasedGeomData().deepcopy(self.data)
    tri_data = NifFormat.NiTriShapeData().deepcopy(base_data)
    # install the requested triangles, falling back to the originals
    tri_data.set_triangles(
        self.data.get_triangles() if triangles is None else triangles)
    # relink the shape data and return the result
    tri_shape.data = tri_data
    return tri_shape
6903
def get_interchangeable_tri_strips(self, strips=None):
    """Return a NiTriStrips block that is geometrically
    interchangeable with this shape.

    :param strips: Optional strip list to use instead of the strips
        of the original shape.
    """
    # reduce the shape to the common NiTriBasedGeom interface, then
    # promote that copy to a NiTriStrips
    base_shape = NifFormat.NiTriBasedGeom().deepcopy(self)
    strips_shape = NifFormat.NiTriStrips().deepcopy(base_shape)
    # same two-step copy for the geometry data (this drops any triangles)
    base_data = NifFormat.NiTriBasedGeomData().deepcopy(self.data)
    strips_data = NifFormat.NiTriStripsData().deepcopy(base_data)
    # install the requested strips, falling back to the originals
    strips_data.set_strips(
        self.data.get_strips() if strips is None else strips)
    # relink the shape data and return the result
    strips_shape.data = strips_data
    return strips_shape
6924
6925 - class NiTriShapeData:
6926 """ 6927 Example usage: 6928 6929 >>> from pyffi.formats.nif import NifFormat 6930 >>> block = NifFormat.NiTriShapeData() 6931 >>> block.set_triangles([(0,1,2),(2,1,3),(2,3,4)]) 6932 >>> block.get_strips() 6933 [[0, 1, 2, 3, 4]] 6934 >>> block.get_triangles() 6935 [(0, 1, 2), (2, 1, 3), (2, 3, 4)] 6936 >>> block.set_strips([[1,0,1,2,3,4]]) 6937 >>> block.get_strips() # stripifier keeps geometry but nothing else 6938 [[0, 2, 1, 3], [2, 4, 3]] 6939 >>> block.get_triangles() 6940 [(0, 2, 1), (1, 2, 3), (2, 4, 3)] 6941 """
6942 - def get_triangles(self):
6943 return [(t.v_1, t.v_2, t.v_3) for t in self.triangles]
6944
6945 - def set_triangles(self, triangles, stitchstrips = False):
6946 # note: the stitchstrips argument is ignored - only present to ensure 6947 # uniform interface between NiTriShapeData and NiTriStripsData 6948 6949 # initialize triangle array 6950 n = len(triangles) 6951 self.num_triangles = n 6952 self.num_triangle_points = 3*n 6953 self.has_triangles = (n > 0) 6954 self.triangles.update_size() 6955 6956 # copy triangles 6957 src = triangles.__iter__() 6958 dst = self.triangles.__iter__() 6959 for k in xrange(n): 6960 dst_t = dst.next() 6961 dst_t.v_1, dst_t.v_2, dst_t.v_3 = src.next()
6962
6963 - def get_strips(self):
6965
def set_strips(self, strips):
    """Set the geometry from triangle strips.

    NiTriShapeData stores plain triangles, so the strips are
    triangulated first.
    """
    triangles = pyffi.utils.tristrip.triangulate(strips)
    self.set_triangles(triangles)
6968
class NiTriStripsData:
    """
    Example usage:

    >>> from pyffi.formats.nif import NifFormat
    >>> block = NifFormat.NiTriStripsData()
    >>> block.set_triangles([(0,1,2),(2,1,3),(2,3,4)])
    >>> block.get_strips()
    [[0, 1, 2, 3, 4]]
    >>> block.get_triangles()
    [(0, 1, 2), (1, 3, 2), (2, 3, 4)]
    >>> block.set_strips([[1,0,1,2,3,4]])
    >>> block.get_strips()
    [[1, 0, 1, 2, 3, 4]]
    >>> block.get_triangles()
    [(0, 2, 1), (1, 2, 3), (2, 4, 3)]
    """
    def get_triangles(self):
        """Return the geometry as a list of triangles."""
        return pyffi.utils.tristrip.triangulate(self.points)

    def set_triangles(self, triangles, stitchstrips=False):
        """Set the geometry from triangles (stored as strips)."""
        strips = pyffi.utils.vertex_cache.stripify(
            triangles, stitchstrips=stitchstrips)
        self.set_strips(strips)

    def get_strips(self):
        """Return a copy of the strips as a list of index lists."""
        return [list(strip) for strip in self.points]

    def set_strips(self, strips):
        """Set the strips, updating lengths, counts, and points."""
        # initialize strips array
        strip_count = len(strips)
        self.num_strips = strip_count
        self.strip_lengths.update_size()
        triangle_count = 0
        for i, strip in enumerate(strips):
            self.strip_lengths[i] = len(strip)
            # a strip of k indices describes k - 2 triangles
            triangle_count += len(strip) - 2
        self.num_triangles = triangle_count
        self.points.update_size()
        self.has_points = (strip_count > 0)

        # copy the strip indices into the points array
        for i, strip in enumerate(strips):
            for j, vert_index in enumerate(strip):
                self.points[i][j] = vert_index
7012
class RagdollDescriptor:
    def update_a_b(self, transform):
        """Update B pivot and axes from A using the given transform.

        :param transform: 4x4 transform taking A coordinates into B
            coordinates.
        """
        # pivot point: scale up by 7 before applying the full transform
        # and scale back down afterwards, so the translation part is
        # applied at 1/7 scale (presumably the havok unit factor used
        # elsewhere for these constraints -- TODO confirm)
        pivot_b = ((7 * self.pivot_a.get_vector_3()) * transform) / 7.0
        for axis in ("x", "y", "z"):
            setattr(self.pivot_b, axis, getattr(pivot_b, axis))
        # axes are directions, so apply the rotation part only
        rotation = transform.get_matrix_33()
        plane_b = self.plane_a.get_vector_3() * rotation
        twist_b = self.twist_a.get_vector_3() * rotation
        for axis in ("x", "y", "z"):
            setattr(self.plane_b, axis, getattr(plane_b, axis))
            setattr(self.twist_b, axis, getattr(twist_b, axis))
7031
class SkinData:
    def get_transform(self):
        """Return scale, rotation, and translation combined into a
        single 4x4 matrix."""
        mat = NifFormat.Matrix44()
        mat.set_scale_rotation_translation(
            self.scale, self.rotation, self.translation)
        return mat

    def set_transform(self, m):
        """Set scale, rotation, and translation from a single 4x4
        matrix.

        :param m: Matrix whose scale/rotation/translation decomposition
            is copied into this block.
        """
        scale, rotation, translation = m.get_scale_rotation_translation()

        self.scale = scale

        # copy the 3x3 rotation component element by element
        for row in (1, 2, 3):
            for col in (1, 2, 3):
                name = "m_%i%i" % (row, col)
                setattr(self.rotation, name, getattr(rotation, name))

        # copy the translation vector
        self.translation.x = translation.x
        self.translation.y = translation.y
        self.translation.z = translation.z
7058
class StringPalette:
    def get_string(self, offset):
        """Return string at given offset.

        :raise ValueError: If ``offset`` is out of range or no string
            starts there.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> print(pal.get_string(0).decode("ascii"))
        abc
        >>> print(pal.get_string(4).decode("ascii"))
        def
        >>> pal.get_string(5) # doctest: +ELLIPSIS
        Traceback (most recent call last):
            ...
        ValueError: ...
        >>> pal.get_string(100) # doctest: +ELLIPSIS
        Traceback (most recent call last):
            ...
        ValueError: ...
        """
        _b00 = pyffi.object_models.common._b00 # shortcut
        # check that offset isn't too large
        if offset >= len(self.palette):
            raise ValueError(
                "StringPalette: getting string at %i "
                "but palette is only %i long"
                % (offset, len(self.palette)))
        # check that a string starts at this offset
        # (i.e. it is 0, or the previous byte is a null terminator)
        if offset > 0 and self.palette[offset-1:offset] != _b00:
            raise ValueError(
                "StringPalette: no string starts at offset %i "
                "(palette is %s)" % (offset, self.palette))
        # return the string up to (excluding) its null terminator
        return self.palette[offset:self.palette.find(_b00, offset)]

    def get_all_strings(self):
        """Return a list of all strings.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> for x in pal.get_all_strings():
        ...     print(x.decode("ascii"))
        abc
        def
        >>> # pal.palette.decode("ascii") needs lstrip magic for py3k
        >>> print(repr(pal.palette.decode("ascii")).lstrip("u"))
        'abc\\x00def\\x00'
        """
        _b00 = pyffi.object_models.common._b00 # shortcut
        # an empty palette contains no strings; without this guard the
        # split below would return [b''] instead of []
        if not self.palette:
            return []
        # drop the trailing null terminator and split on the separators
        return self.palette[:-1].split(_b00)

    def add_string(self, text):
        """Adds string to palette (will recycle existing strings if possible) and
        return offset to the string in the palette.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> pal.add_string("")
        -1
        >>> print(pal.get_string(4).decode("ascii"))
        def
        """
        # empty text
        if not text:
            return -1
        _b00 = pyffi.object_models.common._b00 # shortcut
        # convert text to bytes if necessary
        text = pyffi.object_models.common._as_bytes(text)
        # check if string is already in the palette
        # ... at the start
        if text + _b00 == self.palette[:len(text) + 1]:
            return 0
        # ... or elsewhere (preceded by a null terminator, so we only
        # match whole strings)
        offset = self.palette.find(_b00 + text + _b00)
        if offset != -1:
            return offset + 1
        # no match: append the string to the palette
        # (the original code re-tested offset == -1 here, which is
        # always true after the early return above)
        offset = len(self.palette)
        self.palette = self.palette + text + _b00
        self.length += len(text) + 1
        # return the offset
        return offset

    def clear(self):
        """Clear all strings in the palette.

        >>> from pyffi.formats.nif import NifFormat
        >>> pal = NifFormat.StringPalette()
        >>> pal.add_string("abc")
        0
        >>> pal.add_string("def")
        4
        >>> # pal.palette.decode("ascii") needs lstrip magic for py3k
        >>> print(repr(pal.palette.decode("ascii")).lstrip("u"))
        'abc\\x00def\\x00'
        >>> pal.clear()
        >>> # pal.palette.decode("ascii") needs lstrip magic for py3k
        >>> print(repr(pal.palette.decode("ascii")).lstrip("u"))
        ''
        """
        self.palette = pyffi.object_models.common._b # empty bytes object
        self.length = 0
7175
class TexCoord:
    def as_list(self):
        """Return the texture coordinates as a [u, v] list."""
        return [self.u, self.v]

    def normalize(self):
        """Scale to unit length.

        :raise ZeroDivisionError: If the vector is (nearly) zero.
        """
        norm = (self.u * self.u + self.v * self.v) ** 0.5
        if norm < NifFormat.EPSILON:
            raise ZeroDivisionError('cannot normalize vector %s' % self)
        self.u /= norm
        self.v /= norm

    def __str__(self):
        return "[ %6.3f %6.3f ]" % (self.u, self.v)

    def __mul__(self, x):
        """Scalar product, or dot product when multiplied with another
        TexCoord."""
        if isinstance(x, NifFormat.TexCoord):
            return self.u * x.u + self.v * x.v
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = self.u * x
            result.v = self.v * x
            return result
        raise TypeError("do not know how to multiply TexCoord with %s" % x.__class__)

    def __rmul__(self, x):
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = x * self.u
            result.v = x * self.v
            return result
        raise TypeError("do not know how to multiply %s and TexCoord" % x.__class__)

    def __add__(self, x):
        """Componentwise sum with a scalar or another TexCoord."""
        if isinstance(x, NifFormat.TexCoord):
            result = NifFormat.TexCoord()
            result.u = self.u + x.u
            result.v = self.v + x.v
            return result
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = self.u + x
            result.v = self.v + x
            return result
        raise TypeError("do not know how to add TexCoord and %s" % x.__class__)

    def __radd__(self, x):
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = x + self.u
            result.v = x + self.v
            return result
        raise TypeError("do not know how to add %s and TexCoord" % x.__class__)

    def __sub__(self, x):
        """Componentwise difference with a scalar or another TexCoord."""
        if isinstance(x, NifFormat.TexCoord):
            result = NifFormat.TexCoord()
            result.u = self.u - x.u
            result.v = self.v - x.v
            return result
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = self.u - x
            result.v = self.v - x
            return result
        raise TypeError("do not know how to substract TexCoord and %s" % x.__class__)

    def __rsub__(self, x):
        if isinstance(x, (float, int, long)):
            result = NifFormat.TexCoord()
            result.u = x - self.u
            result.v = x - self.v
            return result
        raise TypeError("do not know how to substract %s and TexCoord" % x.__class__)

    def __neg__(self):
        result = NifFormat.TexCoord()
        result.u = -self.u
        result.v = -self.v
        return result
7261
class NiPSysData:
    def _get_filtered_attribute_list(self, data=None):
        """Yield attributes, hiding the per-vertex geometry arrays when
        reading/writing version >= 0x14020007 with user version 11, as
        if num_vertices were forced to 0.
        """
        # simple hack to act as if we force num_vertices = 0
        for attr in StructBase._get_filtered_attribute_list(self, data):
            if data and (attr.name in ["vertices",
                                       "normals", "tangents", "bitangents",
                                       "vertex_colors", "uv_sets"]):
                # 0x14020007 is version 20.2.0.7; user version 11
                # presumably selects a particular game variant of that
                # version -- NOTE(review): confirm which game
                if data.version >= 0x14020007 and data.user_version == 11:
                    continue
            yield attr

if __name__=='__main__':
    # run the module-level doctests (see the module docstring examples)
    import doctest
    doctest.testmod()
7272 7273 if __name__=='__main__': 7274 import doctest 7275 doctest.testmod() 7276